Use of org.apache.hadoop.security.Credentials in project hive by apache.
The class LlapBaseInputFormat, method constructSubmitWorkRequestProto.
private SubmitWorkRequestProto constructSubmitWorkRequestProto(SubmitWorkInfo submitWorkInfo, int taskNum,
    int attemptNum, InetSocketAddress address, Token<JobTokenIdentifier> token, byte[] fragmentBytes,
    byte[] fragmentBytesSignature, JobConf job) throws IOException {
  ApplicationId appId = submitWorkInfo.getFakeAppId();
  // This works, assuming the executor is running within YARN.
  String user = System.getenv(ApplicationConstants.Environment.USER.name());
  LOG.info("Setting user in submitWorkRequest to: " + user);
  ContainerId containerId =
      ContainerId.newInstance(ApplicationAttemptId.newInstance(appId, attemptNum), taskNum);
  // Credentials can change across DAGs. Ideally construct only once per DAG.
  Credentials credentials = new Credentials();
  TokenCache.setSessionToken(token, credentials);
  ByteBuffer credentialsBinary = serializeCredentials(credentials);
  FragmentRuntimeInfo.Builder runtimeInfo = FragmentRuntimeInfo.newBuilder();
  runtimeInfo.setCurrentAttemptStartTime(System.currentTimeMillis());
  runtimeInfo.setWithinDagPriority(0);
  runtimeInfo.setDagStartTime(submitWorkInfo.getCreationTime());
  runtimeInfo.setFirstAttemptStartTime(submitWorkInfo.getCreationTime());
  runtimeInfo.setNumSelfAndUpstreamTasks(submitWorkInfo.getVertexParallelism());
  runtimeInfo.setNumSelfAndUpstreamCompletedTasks(0);
  SubmitWorkRequestProto.Builder builder = SubmitWorkRequestProto.newBuilder();
  VertexOrBinary.Builder vertexBuilder = VertexOrBinary.newBuilder();
  vertexBuilder.setVertexBinary(ByteString.copyFrom(submitWorkInfo.getVertexBinary()));
  if (submitWorkInfo.getVertexSignature() != null) {
    // Unsecure case?
    builder.setWorkSpecSignature(ByteString.copyFrom(submitWorkInfo.getVertexSignature()));
  }
  builder.setWorkSpec(vertexBuilder.build());
  builder.setFragmentNumber(taskNum);
  builder.setAttemptNumber(attemptNum);
  builder.setContainerIdString(containerId.toString());
  builder.setAmHost(LlapUtil.getAmHostNameFromAddress(address, job));
  builder.setAmPort(address.getPort());
  builder.setCredentialsBinary(ByteString.copyFrom(credentialsBinary));
  builder.setFragmentRuntimeInfo(runtimeInfo.build());
  builder.setInitialEventBytes(ByteString.copyFrom(fragmentBytes));
  if (fragmentBytesSignature != null) {
    builder.setInitialEventSignature(ByteString.copyFrom(fragmentBytesSignature));
  }
  return builder.build();
}
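The serializeCredentials helper referenced above is not part of this snippet. A minimal sketch of such a helper, assuming it only needs to flatten the Credentials into Hadoop's token-storage wire format (org.apache.hadoop.io.DataOutputBuffer is used here purely for illustration):

private ByteBuffer serializeCredentials(Credentials credentials) throws IOException {
  // Copy into a fresh Credentials object, write it out with Hadoop's standard
  // token-storage serialization, then wrap the raw bytes in a ByteBuffer.
  Credentials containerCredentials = new Credentials();
  containerCredentials.addAll(credentials);
  DataOutputBuffer containerTokens = new DataOutputBuffer();
  containerCredentials.writeTokenStorageToStream(containerTokens);
  return ByteBuffer.wrap(containerTokens.getData(), 0, containerTokens.getLength());
}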
Use of org.apache.hadoop.security.Credentials in project hadoop by apache.
The class TestWorkPreservingRMRestart, method testAppFailedToRenewTokenOnRecovery.
@Test(timeout = 30000)
public void testAppFailedToRenewTokenOnRecovery() throws Exception {
  conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
  conf.setInt(YarnConfiguration.RM_AM_MAX_ATTEMPTS, 1);
  UserGroupInformation.setConfiguration(conf);
  MemoryRMStateStore memStore = new MemoryRMStateStore();
  memStore.init(conf);
  MockRM rm1 = new TestSecurityMockRM(conf, memStore);
  rm1.start();
  MockNM nm1 = new MockNM("127.0.0.1:1234", 8192, rm1.getResourceTrackerService());
  nm1.registerNode();
  RMApp app1 = rm1.submitApp(200);
  MockAM am1 = MockRM.launchAndRegisterAM(app1, rm1, nm1);
  MockRM rm2 = new TestSecurityMockRM(conf, memStore) {

    protected DelegationTokenRenewer createDelegationTokenRenewer() {
      return new DelegationTokenRenewer() {

        @Override
        public void addApplicationSync(ApplicationId applicationId, Credentials ts,
            boolean shouldCancelAtEnd, String user) throws IOException {
          throw new IOException("Token renew failed !!");
        }
      };
    }
  };
  nm1.setResourceTrackerService(rm2.getResourceTrackerService());
  rm2.start();
  NMContainerStatus containerStatus =
      TestRMRestart.createNMContainerStatus(am1.getApplicationAttemptId(), 1, ContainerState.RUNNING);
  nm1.registerNode(Arrays.asList(containerStatus), null);
  // The AM re-registers with the restarted RM.
  rm2.waitForState(app1.getApplicationId(), RMAppState.ACCEPTED);
  am1.setAMRMProtocol(rm2.getApplicationMasterService(), rm2.getRMContext());
  am1.registerAppAttempt(true);
  rm2.waitForState(app1.getApplicationId(), RMAppState.RUNNING);
  // Because the token expired, the AM could crash; report its container as complete.
  nm1.nodeHeartbeat(am1.getApplicationAttemptId(), 1, ContainerState.COMPLETE);
  rm2.waitForState(am1.getApplicationAttemptId(), RMAppAttemptState.FAILED);
  rm2.waitForState(app1.getApplicationId(), RMAppState.FAILED);
}
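The Credentials that the overridden addApplicationSync rejects are the delegation tokens a client attaches at submission time. A hedged sketch (the renewer principal and variable names are placeholders, not taken from this test) of how a client typically populates those credentials so the RM's DelegationTokenRenewer has something to renew:

// Illustrative only: gather an HDFS delegation token into a Credentials object,
// naming the RM principal as the renewer, before submitting the application.
Credentials submissionCredentials = new Credentials();
FileSystem fs = FileSystem.get(conf);
// "rm/_HOST@EXAMPLE.COM" is a placeholder renewer principal.
fs.addDelegationTokens("rm/_HOST@EXAMPLE.COM", submissionCredentials);
DataOutputBuffer dob = new DataOutputBuffer();
submissionCredentials.writeTokenStorageToStream(dob);
ByteBuffer securityTokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
// securityTokens would then be set on the ContainerLaunchContext of the submission.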
Use of org.apache.hadoop.security.Credentials in project weave by continuuity.
The class WeaveContainerMain, method loadSecureStore.
private static void loadSecureStore() throws IOException {
  if (!UserGroupInformation.isSecurityEnabled()) {
    return;
  }
  File file = new File(Constants.Files.CREDENTIALS);
  if (file.exists()) {
    Credentials credentials = new Credentials();
    DataInputStream input = new DataInputStream(new FileInputStream(file));
    try {
      credentials.readTokenStorageStream(input);
    } finally {
      input.close();
    }
    UserGroupInformation.getCurrentUser().addCredentials(credentials);
    LOG.info("Secure store updated from {}", file);
  }
}
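The writer side of this credentials file is not shown. A minimal sketch of how such a file could be produced, assuming the launcher simply serializes the current user's Credentials with Hadoop's token-storage format (the local path used here is a placeholder, not the actual Constants.Files.CREDENTIALS value):

// Placeholder path; the real file name comes from Constants.Files.CREDENTIALS.
File credentialsFile = new File("credentials.store");
Credentials credentials = UserGroupInformation.getCurrentUser().getCredentials();
DataOutputStream output = new DataOutputStream(new FileOutputStream(credentialsFile));
try {
  // Same serialization that readTokenStorageStream() expects on the reader side.
  credentials.writeTokenStorageToStream(output);
} finally {
  output.close();
}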
Use of org.apache.hadoop.security.Credentials in project weave by continuuity.
The class AbstractWeaveService, method handleSecureStoreUpdate.
/**
 * Attempts to handle a secure store update.
 *
 * @param message The message received
 * @return {@code true} if the message requests a secure store update, {@code false} otherwise.
 */
protected final boolean handleSecureStoreUpdate(Message message) {
  if (!SystemMessages.SECURE_STORE_UPDATED.equals(message)) {
    return false;
  }
  // If not in secure mode, simply ignore the message.
  if (!UserGroupInformation.isSecurityEnabled()) {
    return true;
  }
  try {
    Credentials credentials = new Credentials();
    Location location = getSecureStoreLocation();
    DataInputStream input = new DataInputStream(new BufferedInputStream(location.getInputStream()));
    try {
      credentials.readTokenStorageStream(input);
    } finally {
      input.close();
    }
    UserGroupInformation.getCurrentUser().addCredentials(credentials);
    this.credentials = credentials;
    LOG.info("Secure store updated from {}.", location.toURI());
  } catch (Throwable t) {
    LOG.error("Failed to update secure store.", t);
  }
  return true;
}
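A hedged sketch of how a subclass's message handler might dispatch to this method (the onReceived name is illustrative, not the actual Weave callback):

// Illustrative dispatcher: let handleSecureStoreUpdate() consume secure-store
// messages first, then fall through to application-specific handling.
public void onReceived(Message message) {
  if (handleSecureStoreUpdate(message)) {
    // Secure store refreshed (or ignored when security is disabled).
    return;
  }
  // ... handle other message types here.
}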
Use of org.apache.hadoop.security.Credentials in project weave by continuuity.
The class YarnWeavePreparer, method createCredentials.
private Credentials createCredentials() {
  Credentials credentials = new Credentials();
  try {
    credentials.addAll(UserGroupInformation.getCurrentUser().getCredentials());
    List<Token<?>> tokens = YarnUtils.addDelegationTokens(yarnConfig, locationFactory, credentials);
    for (Token<?> token : tokens) {
      LOG.debug("Delegation token acquired for {}, {}", locationFactory.getHomeLocation().toURI(), token);
    }
  } catch (IOException e) {
    LOG.warn("Failed to check for secure login type. Not gathering any delegation token.", e);
  }
  return credentials;
}
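YarnUtils.addDelegationTokens is a Weave helper whose body is not shown here. A hypothetical sketch of what such a helper could do, assuming it resolves a FileSystem and delegates to FileSystem.addDelegationTokens (the actual Weave implementation may differ):

// Hypothetical helper body, for illustration only.
static List<Token<?>> addDelegationTokens(Configuration config, LocationFactory locationFactory,
    Credentials credentials) throws IOException {
  if (!UserGroupInformation.isSecurityEnabled()) {
    // Nothing to do outside of Kerberos-secured clusters.
    return Collections.emptyList();
  }
  // The real helper would likely derive the FileSystem from the location factory;
  // FileSystem.get(config) is used here to keep the sketch self-contained.
  FileSystem fileSystem = FileSystem.get(config);
  String renewer = UserGroupInformation.getCurrentUser().getShortUserName();
  Token<?>[] tokens = fileSystem.addDelegationTokens(renewer, credentials);
  return tokens == null ? Collections.<Token<?>>emptyList() : Arrays.asList(tokens);
}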