use of java.io.DataInputStream in project hive by apache.
the class TestHadoopAuthBridge23 method testMetastoreProxyUser.
@Test
public void testMetastoreProxyUser() throws Exception {
  setup();
  final String proxyUserName = "proxyUser";
  //set the configuration up such that proxyUser can act on
  //behalf of all users belonging to the group foo_bar_group (
  //a dummy group)
  String[] groupNames = new String[] { "foo_bar_group" };
  setGroupsInConf(groupNames, proxyUserName);
  final UserGroupInformation delegationTokenUser = UserGroupInformation.getCurrentUser();
  final UserGroupInformation proxyUserUgi = UserGroupInformation.createRemoteUser(proxyUserName);
  String tokenStrForm = proxyUserUgi.doAs(new PrivilegedExceptionAction<String>() {
    public String run() throws Exception {
      try {
        //since the user running the test is not a member of the dummy group
        //foo_bar_group, the call to getDelegationTokenStr will fail
        return getDelegationTokenStr(delegationTokenUser, proxyUserUgi);
      } catch (AuthorizationException ae) {
        return null;
      }
    }
  });
  Assert.assertTrue("Expected the getDelegationToken call to fail", tokenStrForm == null);
  //set the configuration up such that proxyUser can act on
  //behalf of all users belonging to the real group(s) that the
  //user running the test belongs to
  setGroupsInConf(UserGroupInformation.getCurrentUser().getGroupNames(), proxyUserName);
  tokenStrForm = proxyUserUgi.doAs(new PrivilegedExceptionAction<String>() {
    public String run() throws Exception {
      try {
        //since proxyUser can now act on behalf of the real group(s)
        //obtained above, the call to getDelegationTokenStr will succeed
        return getDelegationTokenStr(delegationTokenUser, proxyUserUgi);
      } catch (AuthorizationException ae) {
        return null;
      }
    }
  });
  Assert.assertTrue("Expected the getDelegationToken call to not fail", tokenStrForm != null);
  Token<DelegationTokenIdentifier> t = new Token<DelegationTokenIdentifier>();
  t.decodeFromUrlString(tokenStrForm);
  //check whether the username in the token is what we expect
  DelegationTokenIdentifier d = new DelegationTokenIdentifier();
  d.readFields(new DataInputStream(new ByteArrayInputStream(t.getIdentifier())));
  Assert.assertTrue("Usernames don't match", delegationTokenUser.getShortUserName().equals(d.getUser().getShortUserName()));
}
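setGroupsInConf is a helper defined elsewhere in the test class. A minimal sketch of what it plausibly does, using Hadoop's standard hadoop.proxyuser.<user>.groups/.hosts configuration keys and ProxyUsers.refreshSuperUserGroupsConfiguration; the real implementation may differ:

// Hypothetical sketch of the setGroupsInConf helper used above; the real
// implementation in TestHadoopAuthBridge23 may differ. Assumes a
// Configuration field named conf that the metastore picks up.
private void setGroupsInConf(String[] groupNames, String proxyUserName) throws IOException {
  conf.set("hadoop.proxyuser." + proxyUserName + ".groups", String.join(",", groupNames));
  conf.set("hadoop.proxyuser." + proxyUserName + ".hosts", "*");
  // Push the new proxy-user settings into Hadoop's authorization layer.
  ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
}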
use of java.io.DataInputStream in project hive by apache.
the class TestTimestampWritable method deserializeFromBytes.
private static TimestampWritable deserializeFromBytes(byte[] tsBytes) throws IOException {
  ByteArrayInputStream bais = new ByteArrayInputStream(tsBytes);
  DataInputStream dis = new DataInputStream(bais);
  TimestampWritable deserTSW = new TimestampWritable();
  deserTSW.readFields(dis);
  return deserTSW;
}
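A round-trip usage sketch for this helper: write a TimestampWritable through a DataOutputStream, then recover it from the raw bytes. The Timestamp value is arbitrary, and the (java.sql.Timestamp) constructor is assumed from Hive's serde2.io API:

// Illustrative round trip through deserializeFromBytes; standard
// Writable semantics (write is the counterpart of readFields).
ByteArrayOutputStream baos = new ByteArrayOutputStream();
DataOutputStream dos = new DataOutputStream(baos);
TimestampWritable original = new TimestampWritable(new java.sql.Timestamp(0L));
original.write(dos);
TimestampWritable restored = deserializeFromBytes(baos.toByteArray());
Assert.assertEquals(original.getTimestamp(), restored.getTimestamp());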
use of java.io.DataInputStream in project weave by continuuity.
the class WeaveContainerMain method loadSecureStore.
private static void loadSecureStore() throws IOException {
  if (!UserGroupInformation.isSecurityEnabled()) {
    return;
  }
  File file = new File(Constants.Files.CREDENTIALS);
  if (file.exists()) {
    Credentials credentials = new Credentials();
    DataInputStream input = new DataInputStream(new FileInputStream(file));
    try {
      credentials.readTokenStorageStream(input);
    } finally {
      input.close();
    }
    UserGroupInformation.getCurrentUser().addCredentials(credentials);
    LOG.info("Secure store updated from {}", file);
  }
}
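The credentials file read here is produced by the process that launches the container. A minimal sketch of the matching write side using Hadoop's Credentials API; where the credentials come from is an assumption:

// Hypothetical producer of the credentials file consumed above.
Credentials credentials = UserGroupInformation.getCurrentUser().getCredentials();
DataOutputStream output = new DataOutputStream(new FileOutputStream(new File(Constants.Files.CREDENTIALS)));
try {
  // Counterpart of readTokenStorageStream: Hadoop's token storage format.
  credentials.writeTokenStorageToStream(output);
} finally {
  output.close();
}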
use of java.io.DataInputStream in project weave by continuuity.
the class AbstractWeaveService method handleSecureStoreUpdate.
/**
 * Attempts to handle secure store update.
 *
 * @param message The message received
 * @return {@code true} if the message requests a secure store update, {@code false} otherwise.
 */
protected final boolean handleSecureStoreUpdate(Message message) {
  if (!SystemMessages.SECURE_STORE_UPDATED.equals(message)) {
    return false;
  }
  // If not in secure mode, simply ignore the message.
  if (!UserGroupInformation.isSecurityEnabled()) {
    return true;
  }
  try {
    Credentials credentials = new Credentials();
    Location location = getSecureStoreLocation();
    DataInputStream input = new DataInputStream(new BufferedInputStream(location.getInputStream()));
    try {
      credentials.readTokenStorageStream(input);
    } finally {
      input.close();
    }
    UserGroupInformation.getCurrentUser().addCredentials(credentials);
    this.credentials = credentials;
    LOG.info("Secure store updated from {}.", location.toURI());
  } catch (Throwable t) {
    LOG.error("Failed to update secure store.", t);
  }
  return true;
}
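On Java 7+ the read-and-close pattern shared by both weave snippets collapses to try-with-resources; an equivalent sketch (location as above):

// Equivalent to the try/finally above; DataInputStream is Closeable,
// so try-with-resources closes it on all paths.
Credentials credentials = new Credentials();
try (DataInputStream input = new DataInputStream(new BufferedInputStream(location.getInputStream()))) {
  credentials.readTokenStorageStream(input);
}
UserGroupInformation.getCurrentUser().addCredentials(credentials);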
use of java.io.DataInputStream in project buck by facebook.
the class HttpArtifactCacheBinaryProtocol method readMetadataAndPayload.
public static MetadataAndPayloadReadResultInternal readMetadataAndPayload(DataInputStream input, OutputStream payloadSink) throws IOException {
  // Read the size of the metadata, and use that to build an input stream to read and
  // process the rest of it.
  int metadataSize = input.readInt();
  if (metadataSize > MAX_METADATA_HEADER_SIZE) {
    throw new IOException(String.format("Metadata header size of %d is too big.", metadataSize));
  }
  MetadataAndPayloadReadResultInternal.Builder result = MetadataAndPayloadReadResultInternal.builder();
  // Create a hasher to be used to generate a hash of the metadata and input. We'll use
  // this to compare against the embedded checksum.
  Hasher hasher = HASH_FUNCTION.newHasher();
  byte[] rawMetadata = new byte[metadataSize];
  ByteStreams.readFully(input, rawMetadata);
  try (InputStream rawMetadataIn = new ByteArrayInputStream(rawMetadata)) {
    // The first part of the metadata needs to be included in the hash.
    try (DataInputStream metadataIn = new DataInputStream(new HasherInputStream(hasher, rawMetadataIn))) {
      // Read in the rule keys that stored this artifact, and add them to the hash we're
      // building up.
      int size = metadataIn.readInt();
      for (int i = 0; i < size; i++) {
        result.addRuleKeys(new RuleKey(metadataIn.readUTF()));
      }
      // Read in the actual metadata map, and add it to the hash.
      size = metadataIn.readInt();
      for (int i = 0; i < size; i++) {
        String key = metadataIn.readUTF();
        int valSize = metadataIn.readInt();
        byte[] val = new byte[valSize];
        ByteStreams.readFully(metadataIn, val);
        result.putMetadata(key, new String(val, Charsets.UTF_8));
      }
    }
    // Next, read in the embedded expected checksum, which should be the last bytes in
    // the metadata header.
    byte[] hashCodeBytes = new byte[HASH_FUNCTION.bits() / Byte.SIZE];
    ByteStreams.readFully(rawMetadataIn, hashCodeBytes);
    result.setExpectedHashCode(HashCode.fromBytes(hashCodeBytes));
  }
  // The remaining data is the payload, which we write to the created file, and also include
  // in our verification checksum.
  Hasher artifactOnlyHasher = HASH_FUNCTION.newHasher();
  try (InputStream payload = new HasherInputStream(artifactOnlyHasher, new HasherInputStream(hasher, input))) {
    result.setResponseSizeBytes(ByteStreams.copy(payload, payloadSink));
    result.setArtifactOnlyHashCode(artifactOnlyHasher.hash());
  }
  result.setActualHashCode(hasher.hash());
  return result.build();
}
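The write side of this protocol can be deduced from the read order above: a length-prefixed metadata block holding the rule keys, the metadata map, and a trailing checksum that covers the hashed section plus the payload. A hypothetical sketch, not Buck's actual writer; method and parameter names are illustrative:

// Hypothetical inverse of readMetadataAndPayload, deduced from the read
// logic above. Takes the payload as a byte[] for simplicity.
public static void writeMetadataAndPayload(DataOutputStream out, List<String> ruleKeys, Map<String, String> metadata, byte[] payload) throws IOException {
  // Serialize the hashed portion of the metadata into a buffer first,
  // since its total size must be written before the block itself.
  ByteArrayOutputStream buf = new ByteArrayOutputStream();
  DataOutputStream meta = new DataOutputStream(buf);
  meta.writeInt(ruleKeys.size());
  for (String ruleKey : ruleKeys) {
    meta.writeUTF(ruleKey);
  }
  meta.writeInt(metadata.size());
  for (Map.Entry<String, String> entry : metadata.entrySet()) {
    meta.writeUTF(entry.getKey());
    byte[] val = entry.getValue().getBytes(Charsets.UTF_8);
    meta.writeInt(val.length);
    meta.write(val);
  }
  meta.flush();
  byte[] hashedSection = buf.toByteArray();
  // The checksum covers the hashed section plus the payload, matching
  // what the reader accumulates in its hasher.
  Hasher hasher = HASH_FUNCTION.newHasher();
  hasher.putBytes(hashedSection);
  hasher.putBytes(payload);
  byte[] checksum = hasher.hash().asBytes();
  // The metadata size written up front includes the trailing checksum,
  // as the reader expects.
  out.writeInt(hashedSection.length + checksum.length);
  out.write(hashedSection);
  out.write(checksum);
  out.write(payload);
}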