Use of org.apache.hadoop.io.DataInputByteBuffer in project hive by apache.
The class ShuffleHandler, method deserializeMetaData.
/**
 * A helper function to deserialize the metadata returned by ShuffleHandler.
 * @param meta the metadata returned by the ShuffleHandler
 * @return the port the ShuffleHandler is listening on to serve shuffle data
 */
public static int deserializeMetaData(ByteBuffer meta) throws IOException {
  // TODO this should be returning a class, not just an int
  DataInputByteBuffer in = new DataInputByteBuffer();
  in.reset(meta);
  int port = in.readInt();
  return port;
}
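The buffer consumed above is produced by a matching serializer. A minimal sketch of that write side, modeled on ShuffleHandler's serializeMetaData but written here for illustration rather than copied from the project: it writes the port through a DataOutputBuffer and wraps only the written bytes in a ByteBuffer.

import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.hadoop.io.DataOutputBuffer;

// Illustrative counterpart to deserializeMetaData above (a sketch):
// serialize the port so deserializeMetaData can read it back.
public static ByteBuffer serializeMetaData(int port) throws IOException {
  DataOutputBuffer portDob = new DataOutputBuffer();
  portDob.writeInt(port);
  // wrap only the bytes actually written, not the full backing array
  return ByteBuffer.wrap(portDob.getData(), 0, portDob.getLength());
}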
Use of org.apache.hadoop.io.DataInputByteBuffer in project apex-core by apache.
The class StreamingContainerManagerTest, method testDeployInfoSerialization.
@Test
public void testDeployInfoSerialization() throws Exception {
  OperatorDeployInfo ndi = new OperatorDeployInfo();
  ndi.name = "node1";
  ndi.type = OperatorDeployInfo.OperatorType.GENERIC;
  ndi.id = 1;
  ndi.contextAttributes = new com.datatorrent.api.Attribute.AttributeMap.DefaultAttributeMap();
  ndi.contextAttributes.put(OperatorContext.SPIN_MILLIS, 100);

  OperatorDeployInfo.InputDeployInfo input = new OperatorDeployInfo.InputDeployInfo();
  input.declaredStreamId = "streamToNode";
  input.portName = "inputPortNameOnNode";
  input.sourceNodeId = 99;
  ndi.inputs = new ArrayList<>();
  ndi.inputs.add(input);

  OperatorDeployInfo.OutputDeployInfo output = new OperatorDeployInfo.OutputDeployInfo();
  output.declaredStreamId = "streamFromNode";
  output.portName = "outputPortNameOnNode";
  ndi.outputs = new ArrayList<>();
  ndi.outputs.add(output);

  ContainerHeartbeatResponse scc = new ContainerHeartbeatResponse();
  scc.deployRequest = Collections.singletonList(ndi);

  DataOutputByteBuffer out = new DataOutputByteBuffer();
  scc.write(out);
  DataInputByteBuffer in = new DataInputByteBuffer();
  in.reset(out.getData());

  ContainerHeartbeatResponse clone = new ContainerHeartbeatResponse();
  clone.readFields(in);

  Assert.assertNotNull(clone.deployRequest);
  Assert.assertEquals(1, clone.deployRequest.size());
  OperatorDeployInfo ndiClone = clone.deployRequest.get(0);
  Assert.assertEquals("name", ndi.name, ndiClone.name);
  Assert.assertEquals("type", ndi.type, ndiClone.type);

  String nodeToString = ndi.toString();
  Assert.assertTrue(nodeToString.contains(input.portName));
  Assert.assertTrue(nodeToString.contains(output.portName));
  Assert.assertEquals("contextAttributes " + ndiClone.contextAttributes, Integer.valueOf(100), ndiClone.contextAttributes.get(OperatorContext.SPIN_MILLIS));
}
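The test exercises the general Writable round-trip pattern: write any Writable to a DataOutputByteBuffer, then rehydrate a fresh instance through DataInputByteBuffer. Note that getData() returns the backing ByteBuffer[] and reset(ByteBuffer...) is varargs, so the two fit together directly. A minimal self-contained sketch, using Text as a stand-in for ContainerHeartbeatResponse:

import java.io.IOException;
import org.apache.hadoop.io.DataInputByteBuffer;
import org.apache.hadoop.io.DataOutputByteBuffer;
import org.apache.hadoop.io.Text;

public class WritableRoundTrip {
  public static void main(String[] args) throws IOException {
    Text original = new Text("node1");

    // serialize into an in-memory, ByteBuffer-backed stream
    DataOutputByteBuffer out = new DataOutputByteBuffer();
    original.write(out);

    // getData() returns the backing ByteBuffer[]; reset(ByteBuffer...) accepts it
    DataInputByteBuffer in = new DataInputByteBuffer();
    in.reset(out.getData());

    Text clone = new Text();
    clone.readFields(in);
    System.out.println(original.equals(clone)); // prints true
  }
}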
Use of org.apache.hadoop.io.DataInputByteBuffer in project hadoop by apache.
The class TestDelegationTokenRenewer, method testReplaceExpiringDelegationToken.
@Test(timeout = 20000)
public void testReplaceExpiringDelegationToken() throws Exception {
  conf.setBoolean(YarnConfiguration.RM_PROXY_USER_PRIVILEGES_ENABLED, true);
  conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
  UserGroupInformation.setConfiguration(conf);

  // create token1
  Text userText1 = new Text("user1");
  DelegationTokenIdentifier dtId1 = new DelegationTokenIdentifier(userText1, new Text("renewer1"), userText1);
  // set max date to 0 to simulate an expiring token
  dtId1.setMaxDate(0);
  final Token<DelegationTokenIdentifier> token1 =
      new Token<DelegationTokenIdentifier>(dtId1.getBytes(), "password1".getBytes(), dtId1.getKind(), new Text("service1"));

  // create token2
  Text userText2 = new Text("user2");
  DelegationTokenIdentifier dtId2 = new DelegationTokenIdentifier(userText1, new Text("renewer2"), userText2);
  final Token<DelegationTokenIdentifier> expectedToken =
      new Token<DelegationTokenIdentifier>(dtId2.getBytes(), "password2".getBytes(), dtId2.getKind(), new Text("service2"));

  final MockRM rm = new TestSecurityMockRM(conf, null) {
    @Override
    protected DelegationTokenRenewer createDelegationTokenRenewer() {
      return new DelegationTokenRenewer() {
        @Override
        protected Token<?>[] obtainSystemTokensForUser(String user, final Credentials credentials) throws IOException {
          credentials.addToken(expectedToken.getService(), expectedToken);
          return new Token<?>[] { expectedToken };
        }
      };
    }
  };
  rm.start();

  Credentials credentials = new Credentials();
  credentials.addToken(userText1, token1);
  RMApp app = rm.submitApp(200, "name", "user", new HashMap<ApplicationAccessType, String>(), false, "default", 1, credentials);

  // wait for the initial expiring hdfs token to be removed from allTokens
  GenericTestUtils.waitFor(new Supplier<Boolean>() {
    public Boolean get() {
      return rm.getRMContext().getDelegationTokenRenewer().getAllTokens().get(token1) == null;
    }
  }, 1000, 20000);

  // wait for the initial expiring hdfs token to be removed from appTokens
  GenericTestUtils.waitFor(new Supplier<Boolean>() {
    public Boolean get() {
      return !rm.getRMContext().getDelegationTokenRenewer().getDelegationTokens().contains(token1);
    }
  }, 1000, 20000);

  // wait for the newly retrieved hdfs token
  GenericTestUtils.waitFor(new Supplier<Boolean>() {
    public Boolean get() {
      return rm.getRMContext().getDelegationTokenRenewer().getDelegationTokens().contains(expectedToken);
    }
  }, 1000, 20000);

  // check that the NM can retrieve the token
  final MockNM nm1 = new MockNM("127.0.0.1:1234", 15120, rm.getResourceTrackerService());
  nm1.registerNode();
  NodeHeartbeatResponse response = nm1.nodeHeartbeat(true);
  ByteBuffer tokenBuffer = response.getSystemCredentialsForApps().get(app.getApplicationId());
  Assert.assertNotNull(tokenBuffer);

  Credentials appCredentials = new Credentials();
  DataInputByteBuffer buf = new DataInputByteBuffer();
  tokenBuffer.rewind();
  buf.reset(tokenBuffer);
  appCredentials.readTokenStorageStream(buf);
  Assert.assertTrue(appCredentials.getAllTokens().contains(expectedToken));
}
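The final assertions decode the ByteBuffer that the RM shipped in the node heartbeat. The write side of that exchange uses Credentials.writeTokenStorageToStream; a sketch of the full round trip (the helper name roundTripCredentials is ours, the Credentials APIs are real):

import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.hadoop.io.DataInputByteBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.security.Credentials;

// Sketch of the serialize/deserialize pair the test's last assertion relies on.
static Credentials roundTripCredentials(Credentials creds) throws IOException {
  // write side: what the RM does before shipping credentials to the NM
  DataOutputBuffer dob = new DataOutputBuffer();
  creds.writeTokenStorageToStream(dob);
  ByteBuffer buffer = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());

  // read side: what the test does with the heartbeat payload
  Credentials copy = new Credentials();
  DataInputByteBuffer dib = new DataInputByteBuffer();
  dib.reset(buffer);
  copy.readTokenStorageStream(dib);
  return copy;
}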
Use of org.apache.hadoop.io.DataInputByteBuffer in project hive by apache.
The class LlapTokenClient, method extractToken.
private Token<LlapTokenIdentifier> extractToken(ByteString tokenBytes) throws IOException {
  Token<LlapTokenIdentifier> token = new Token<>();
  DataInputByteBuffer in = new DataInputByteBuffer();
  in.reset(tokenBytes.asReadOnlyByteBuffer());
  token.readFields(in);
  return token;
}
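Here asReadOnlyByteBuffer() exposes the protobuf bytes without copying them. For context, a sketch of how the sending side might produce such a ByteString from a token; the helper name serializeToken is ours, while Token.write and ByteString.copyFrom are real APIs:

import java.io.IOException;
import com.google.protobuf.ByteString;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.security.token.Token;

// Illustrative write side for extractToken above: serialize the token's
// Writable form and wrap the written bytes in a protobuf ByteString.
static ByteString serializeToken(Token<?> token) throws IOException {
  DataOutputBuffer out = new DataOutputBuffer();
  token.write(out);
  return ByteString.copyFrom(out.getData(), 0, out.getLength());
}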
Use of org.apache.hadoop.io.DataInputByteBuffer in project hive by apache.
The class ShuffleHandler, method deserializeServiceData.
static Token<JobTokenIdentifier> deserializeServiceData(ByteBuffer secret) throws IOException {
  DataInputByteBuffer in = new DataInputByteBuffer();
  in.reset(secret);
  Token<JobTokenIdentifier> jt = new Token<JobTokenIdentifier>();
  jt.readFields(in);
  return jt;
}
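The secret read here is the job token that was registered as auxiliary-service data when the container was launched. A sketch of the matching write side, modeled on ShuffleHandler's serializeServiceData but written here for illustration:

import java.io.IOException;
import java.nio.ByteBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.mapreduce.security.token.JobTokenIdentifier;
import org.apache.hadoop.security.token.Token;

// Sketch of the write side (modeled on serializeServiceData, not copied from
// the project): serialize the job token into the ByteBuffer read back above.
static ByteBuffer serializeServiceData(Token<JobTokenIdentifier> jobToken) throws IOException {
  DataOutputBuffer dob = new DataOutputBuffer();
  jobToken.write(dob);
  return ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
}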