Search in sources:

Example 1 with Credentials

use of org.apache.hadoop.security.Credentials in project hive by apache.

the class LlapBaseInputFormat method constructSubmitWorkRequestProto.

private SubmitWorkRequestProto constructSubmitWorkRequestProto(SubmitWorkInfo submitWorkInfo, int taskNum, int attemptNum, InetSocketAddress address, Token<JobTokenIdentifier> token, byte[] fragmentBytes, byte[] fragmentBytesSignature, JobConf job) throws IOException {
    ApplicationId appId = submitWorkInfo.getFakeAppId();
    // This works, assuming the executor is running within YARN.
    String user = System.getenv(ApplicationConstants.Environment.USER.name());
    LOG.info("Setting user in submitWorkRequest to: " + user);
    ContainerId containerId = ContainerId.newInstance(ApplicationAttemptId.newInstance(appId, attemptNum), taskNum);
    // Credentials can change across DAGs. Ideally construct only once per DAG.
    Credentials credentials = new Credentials();
    TokenCache.setSessionToken(token, credentials);
    ByteBuffer credentialsBinary = serializeCredentials(credentials);
    FragmentRuntimeInfo.Builder runtimeInfo = FragmentRuntimeInfo.newBuilder();
    runtimeInfo.setCurrentAttemptStartTime(System.currentTimeMillis());
    runtimeInfo.setWithinDagPriority(0);
    runtimeInfo.setDagStartTime(submitWorkInfo.getCreationTime());
    runtimeInfo.setFirstAttemptStartTime(submitWorkInfo.getCreationTime());
    runtimeInfo.setNumSelfAndUpstreamTasks(submitWorkInfo.getVertexParallelism());
    runtimeInfo.setNumSelfAndUpstreamCompletedTasks(0);
    SubmitWorkRequestProto.Builder builder = SubmitWorkRequestProto.newBuilder();
    VertexOrBinary.Builder vertexBuilder = VertexOrBinary.newBuilder();
    vertexBuilder.setVertexBinary(ByteString.copyFrom(submitWorkInfo.getVertexBinary()));
    if (submitWorkInfo.getVertexSignature() != null) {
        // Signature may be null in the unsecured case.
        builder.setWorkSpecSignature(ByteString.copyFrom(submitWorkInfo.getVertexSignature()));
    }
    builder.setWorkSpec(vertexBuilder.build());
    builder.setFragmentNumber(taskNum);
    builder.setAttemptNumber(attemptNum);
    builder.setContainerIdString(containerId.toString());
    builder.setAmHost(LlapUtil.getAmHostNameFromAddress(address, job));
    builder.setAmPort(address.getPort());
    builder.setCredentialsBinary(ByteString.copyFrom(credentialsBinary));
    builder.setFragmentRuntimeInfo(runtimeInfo.build());
    builder.setInitialEventBytes(ByteString.copyFrom(fragmentBytes));
    if (fragmentBytesSignature != null) {
        builder.setInitialEventSignature(ByteString.copyFrom(fragmentBytesSignature));
    }
    return builder.build();
}
Also used : FragmentRuntimeInfo(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo) VertexOrBinary(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary) ContainerId(org.apache.hadoop.yarn.api.records.ContainerId) SubmitWorkRequestProto(org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto) ByteString(com.google.protobuf.ByteString) ApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId) ByteBuffer(java.nio.ByteBuffer) Credentials(org.apache.hadoop.security.Credentials)
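
The serializeCredentials helper called above is not shown in this snippet. A minimal sketch of what such a helper commonly looks like in Hadoop code, assuming the standard Credentials and DataOutputBuffer APIs (the class name and placement are illustrative):

import java.io.IOException;
import java.nio.ByteBuffer;

import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.security.Credentials;

public final class CredentialsSerDe {

    // Writes the tokens and secret keys held by Credentials into the standard
    // token-storage wire format and wraps the resulting bytes in a ByteBuffer.
    static ByteBuffer serializeCredentials(Credentials credentials) throws IOException {
        DataOutputBuffer dob = new DataOutputBuffer();
        credentials.writeTokenStorageToStream(dob);
        // Only the first getLength() bytes of the backing array are valid.
        return ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
    }
}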

Example 2 with Credentials

use of org.apache.hadoop.security.Credentials in project hadoop by apache.

the class TestWorkPreservingRMRestart method testAppFailedToRenewTokenOnRecovery.

@Test(timeout = 30000)
public void testAppFailedToRenewTokenOnRecovery() throws Exception {
    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
    conf.setInt(YarnConfiguration.RM_AM_MAX_ATTEMPTS, 1);
    UserGroupInformation.setConfiguration(conf);
    MemoryRMStateStore memStore = new MemoryRMStateStore();
    memStore.init(conf);
    MockRM rm1 = new TestSecurityMockRM(conf, memStore);
    rm1.start();
    MockNM nm1 = new MockNM("127.0.0.1:1234", 8192, rm1.getResourceTrackerService());
    nm1.registerNode();
    RMApp app1 = rm1.submitApp(200);
    MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm1);
    MockRM rm2 = new TestSecurityMockRM(conf, memStore) {

        protected DelegationTokenRenewer createDelegationTokenRenewer() {
            return new DelegationTokenRenewer() {

                @Override
                public void addApplicationSync(ApplicationId applicationId, Credentials ts, boolean shouldCancelAtEnd, String user) throws IOException {
                    throw new IOException("Token renew failed !!");
                }
            };
        }
    };
    nm1.setResourceTrackerService(rm2.getResourceTrackerService());
    rm2.start();
    NMContainerStatus containerStatus = TestRMRestart.createNMContainerStatus(am1.getApplicationAttemptId(), 1, ContainerState.RUNNING);
    nm1.registerNode(Arrays.asList(containerStatus), null);
    // AM re-registers with the recovered RM
    rm2.waitForState(app1.getApplicationId(), RMAppState.ACCEPTED);
    am1.setAMRMProtocol(rm2.getApplicationMasterService(), rm2.getRMContext());
    am1.registerAppAttempt(true);
    rm2.waitForState(app1.getApplicationId(), RMAppState.RUNNING);
    // Because the token could not be renewed, the AM may crash.
    nm1.nodeHeartbeat(am1.getApplicationAttemptId(), 1, ContainerState.COMPLETE);
    rm2.waitForState(am1.getApplicationAttemptId(), RMAppAttemptState.FAILED);
    rm2.waitForState(app1.getApplicationId(), RMAppState.FAILED);
}
Also used : RMApp(org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp) DelegationTokenRenewer(org.apache.hadoop.yarn.server.resourcemanager.security.DelegationTokenRenewer) MemoryRMStateStore(org.apache.hadoop.yarn.server.resourcemanager.recovery.MemoryRMStateStore) NMContainerStatus(org.apache.hadoop.yarn.server.api.protocolrecords.NMContainerStatus) TestSecurityMockRM(org.apache.hadoop.yarn.server.resourcemanager.TestRMRestart.TestSecurityMockRM) IOException(java.io.IOException) ApplicationId(org.apache.hadoop.yarn.api.records.ApplicationId) Credentials(org.apache.hadoop.security.Credentials) TestSecurityMockRM(org.apache.hadoop.yarn.server.resourcemanager.TestRMRestart.TestSecurityMockRM) Test(org.junit.Test)
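
For context, the Credentials that addApplicationSync receives originate from the tokens a client packs into its application submission. A minimal client-side sketch of that packing step, assuming a standard secure YARN submission (the method name packTokens is illustrative):

import java.io.IOException;
import java.nio.ByteBuffer;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;

public final class TokenPacking {

    // Collects HDFS delegation tokens into a Credentials object and serializes
    // them into the AM ContainerLaunchContext; these are the credentials the RM
    // later hands to its DelegationTokenRenewer for the application.
    static void packTokens(Configuration conf, String rmPrincipal,
                           ContainerLaunchContext amContainer) throws IOException {
        Credentials credentials = new Credentials();
        FileSystem fs = FileSystem.get(conf);
        // Adds any missing delegation tokens for this FileSystem, naming the RM
        // principal as the renewer so the RM can renew them on the app's behalf.
        fs.addDelegationTokens(rmPrincipal, credentials);
        DataOutputBuffer dob = new DataOutputBuffer();
        credentials.writeTokenStorageToStream(dob);
        amContainer.setTokens(ByteBuffer.wrap(dob.getData(), 0, dob.getLength()));
    }
}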

Example 3 with Credentials

use of org.apache.hadoop.security.Credentials in project weave by continuuity.

the class WeaveContainerMain method loadSecureStore.

private static void loadSecureStore() throws IOException {
    if (!UserGroupInformation.isSecurityEnabled()) {
        return;
    }
    File file = new File(Constants.Files.CREDENTIALS);
    if (file.exists()) {
        Credentials credentials = new Credentials();
        DataInputStream input = new DataInputStream(new FileInputStream(file));
        try {
            credentials.readTokenStorageStream(input);
        } finally {
            input.close();
        }
        UserGroupInformation.getCurrentUser().addCredentials(credentials);
        LOG.info("Secure store updated from {}", file);
    }
}
Also used : DataInputStream(java.io.DataInputStream) LocalFile(com.continuuity.weave.api.LocalFile) File(java.io.File) Credentials(org.apache.hadoop.security.Credentials) FileInputStream(java.io.FileInputStream)
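
The credentials file read here is produced elsewhere with the matching write API. A minimal sketch of the writer side, assuming a plain local file and the current user's credentials (the class name is illustrative):

import java.io.DataOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;

import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;

public final class SecureStoreWriter {

    // Persists the current user's credentials in the token-storage format that
    // Credentials.readTokenStorageStream(...) in loadSecureStore() expects.
    static void saveSecureStore(File file) throws IOException {
        Credentials credentials = UserGroupInformation.getCurrentUser().getCredentials();
        DataOutputStream output = new DataOutputStream(new FileOutputStream(file));
        try {
            credentials.writeTokenStorageToStream(output);
        } finally {
            output.close();
        }
    }
}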

Example 4 with Credentials

use of org.apache.hadoop.security.Credentials in project weave by continuuity.

the class AbstractWeaveService method handleSecureStoreUpdate.

/**
   * Attempts to handle secure store update.
   *
   * @param message The message received
   * @return {@code true} if the message requests for secure store update, {@code false} otherwise.
   */
protected final boolean handleSecureStoreUpdate(Message message) {
    if (!SystemMessages.SECURE_STORE_UPDATED.equals(message)) {
        return false;
    }
    // If not in secure mode, simply ignore the message.
    if (!UserGroupInformation.isSecurityEnabled()) {
        return true;
    }
    try {
        Credentials credentials = new Credentials();
        Location location = getSecureStoreLocation();
        DataInputStream input = new DataInputStream(new BufferedInputStream(location.getInputStream()));
        try {
            credentials.readTokenStorageStream(input);
        } finally {
            input.close();
        }
        UserGroupInformation.getCurrentUser().addCredentials(credentials);
        this.credentials = credentials;
        LOG.info("Secure store updated from {}.", location.toURI());
    } catch (Throwable t) {
        LOG.error("Failed to update secure store.", t);
    }
    return true;
}
Also used : BufferedInputStream(java.io.BufferedInputStream) DataInputStream(java.io.DataInputStream) Credentials(org.apache.hadoop.security.Credentials) Location(com.continuuity.weave.filesystem.Location)
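
After addCredentials(...) the newly read tokens are merged into the current UGI. A minimal sketch of inspecting what a Credentials object holds, using only the public Credentials API (the alias and the use of System.out are illustrative):

import java.util.Collection;

import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;

public final class CredentialsInspector {

    // Prints the number of tokens and secret keys carried by a Credentials
    // instance, plus each token's service name and kind.
    static void dump(Credentials credentials) {
        System.out.println("tokens=" + credentials.numberOfTokens()
            + ", secretKeys=" + credentials.numberOfSecretKeys());
        Collection<Token<? extends TokenIdentifier>> tokens = credentials.getAllTokens();
        for (Token<? extends TokenIdentifier> token : tokens) {
            System.out.println("token service=" + token.getService()
                + ", kind=" + token.getKind());
        }
        // Individual tokens can also be looked up by alias.
        Token<? extends TokenIdentifier> byAlias = credentials.getToken(new Text("some-alias"));
        System.out.println("token for alias 'some-alias': " + byAlias);
    }
}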

Example 5 with Credentials

use of org.apache.hadoop.security.Credentials in project weave by continuuity.

the class YarnWeavePreparer method createCredentials.

private Credentials createCredentials() {
    Credentials credentials = new Credentials();
    try {
        credentials.addAll(UserGroupInformation.getCurrentUser().getCredentials());
        List<Token<?>> tokens = YarnUtils.addDelegationTokens(yarnConfig, locationFactory, credentials);
        for (Token<?> token : tokens) {
            LOG.debug("Delegation token acquired for {}, {}", locationFactory.getHomeLocation().toURI(), token);
        }
    } catch (IOException e) {
        LOG.warn("Failed to check for secure login type. Not gathering any delegation token.", e);
    }
    return credentials;
}
Also used : TypeToken(com.google.common.reflect.TypeToken) Token(org.apache.hadoop.security.token.Token) IOException(java.io.IOException) Credentials(org.apache.hadoop.security.Credentials)
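
YarnUtils.addDelegationTokens is Weave-specific and not shown here. A minimal sketch of how additional tokens and secret keys can be folded into a Credentials object using only the Hadoop API (the aliases and values are illustrative):

import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;

public final class CredentialsMerging {

    // Folds extra tokens and a secret key into an existing Credentials object.
    static void merge(Credentials target, Credentials extra,
                      Token<? extends TokenIdentifier> token) {
        // addAll overwrites existing entries with the same alias;
        // mergeAll would keep existing entries and only add new aliases.
        target.addAll(extra);
        // Individual tokens and secret keys are registered under a Text alias.
        target.addToken(new Text("my-service-token"), token);
        target.addSecretKey(new Text("my-secret"), "secret-bytes".getBytes());
    }
}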

Aggregations

Credentials (org.apache.hadoop.security.Credentials): 351
Test (org.junit.Test): 141
Token (org.apache.hadoop.security.token.Token): 101
IOException (java.io.IOException): 91
Text (org.apache.hadoop.io.Text): 85
Configuration (org.apache.hadoop.conf.Configuration): 75
Path (org.apache.hadoop.fs.Path): 73
HashMap (java.util.HashMap): 61
ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId): 58
ByteBuffer (java.nio.ByteBuffer): 55
DataOutputBuffer (org.apache.hadoop.io.DataOutputBuffer): 49
LocalResource (org.apache.hadoop.yarn.api.records.LocalResource): 47
UserGroupInformation (org.apache.hadoop.security.UserGroupInformation): 45
File (java.io.File): 37
ContainerId (org.apache.hadoop.yarn.api.records.ContainerId): 35
YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration): 35
TokenIdentifier (org.apache.hadoop.security.token.TokenIdentifier): 32
InetSocketAddress (java.net.InetSocketAddress): 31
TezConfiguration (org.apache.tez.dag.api.TezConfiguration): 31
FileSystem (org.apache.hadoop.fs.FileSystem): 29