
Example 21 with DataInputBuffer

Use of org.apache.hadoop.io.DataInputBuffer in project hadoop by apache.

From class TestContainerLocalizer, method createFakeCredentials.

@SuppressWarnings({ "rawtypes", "unchecked" })
static DataInputBuffer createFakeCredentials(Random r, int nTok) throws IOException {
    Credentials creds = new Credentials();
    byte[] password = new byte[20];
    Text kind = new Text();
    Text service = new Text();
    Text alias = new Text();
    // Build nTok fake tokens, each with a random 20-byte password.
    for (int i = 0; i < nTok; ++i) {
        byte[] identifier = ("idef" + i).getBytes();
        r.nextBytes(password);
        kind.set("kind" + i);
        service.set("service" + i);
        alias.set("token" + i);
        Token token = new Token(identifier, password, kind, service);
        creds.addToken(alias, token);
    }
    // Serialize the credentials, then wrap the same backing bytes for reading.
    DataOutputBuffer buf = new DataOutputBuffer();
    creds.writeTokenStorageToStream(buf);
    DataInputBuffer ret = new DataInputBuffer();
    ret.reset(buf.getData(), 0, buf.getLength());
    return ret;
}
Also used: DataInputBuffer (org.apache.hadoop.io.DataInputBuffer), DataOutputBuffer (org.apache.hadoop.io.DataOutputBuffer), Text (org.apache.hadoop.io.Text), Token (org.apache.hadoop.security.token.Token), Credentials (org.apache.hadoop.security.Credentials)
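The returned buffer can be consumed directly, since DataInputBuffer extends DataInputStream. A minimal sketch of the reverse step, assuming the standard Credentials.readTokenStorageStream API (the token count is illustrative):

// Read the fake credentials back out of the buffer produced above.
DataInputBuffer buf = createFakeCredentials(new Random(), 3);
Credentials readBack = new Credentials();
// DataInputBuffer extends DataInputStream, so it can be passed in directly.
readBack.readTokenStorageStream(buf);
assert readBack.numberOfTokens() == 3;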

Example 22 with DataInputBuffer

Use of org.apache.hadoop.io.DataInputBuffer in project hadoop by apache.

From class TestPBRecordImpl, method testLocalizerStatusSerDe.

@Test(timeout = 10000)
public void testLocalizerStatusSerDe() throws Exception {
    LocalizerStatus rsrcS = createLocalizerStatus();
    assertTrue(rsrcS instanceof LocalizerStatusPBImpl);
    LocalizerStatusPBImpl rsrcPb = (LocalizerStatusPBImpl) rsrcS;
    DataOutputBuffer out = new DataOutputBuffer();
    // Delimited write: a varint length prefix precedes the message bytes.
    rsrcPb.getProto().writeDelimitedTo(out);
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), 0, out.getLength());
    LocalizerStatusProto rsrcPbD = LocalizerStatusProto.parseDelimitedFrom(in);
    assertNotNull(rsrcPbD);
    LocalizerStatus rsrcD = new LocalizerStatusPBImpl(rsrcPbD);
    assertEquals(rsrcS, rsrcD);
    assertEquals("localizer0", rsrcS.getLocalizerId());
    assertEquals("localizer0", rsrcD.getLocalizerId());
    assertEquals(createLocalResourceStatus(), rsrcS.getResourceStatus(0));
    assertEquals(createLocalResourceStatus(), rsrcD.getResourceStatus(0));
}
Also used: DataInputBuffer (org.apache.hadoop.io.DataInputBuffer), DataOutputBuffer (org.apache.hadoop.io.DataOutputBuffer), LocalizerStatus (org.apache.hadoop.yarn.server.nodemanager.api.protocolrecords.LocalizerStatus), LocalizerStatusProto (org.apache.hadoop.yarn.proto.YarnServerNodemanagerServiceProtos.LocalizerStatusProto), Test (org.junit.Test)
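The writeDelimitedTo/parseDelimitedFrom pair length-prefixes each message with a varint, so several messages can share one stream and the reader knows where each ends. A minimal sketch of the same round trip through Hadoop's in-memory buffers, using a hypothetical generated message class FooProto with a hypothetical name field (any protobuf Message behaves the same way):

// FooProto is a hypothetical generated protobuf class, used only for illustration.
DataOutputBuffer out = new DataOutputBuffer();
FooProto.newBuilder().setName("a").build().writeDelimitedTo(out); // varint length, then bytes
FooProto.newBuilder().setName("b").build().writeDelimitedTo(out);
DataInputBuffer in = new DataInputBuffer();
in.reset(out.getData(), 0, out.getLength());
FooProto first = FooProto.parseDelimitedFrom(in);  // consumes exactly one message
FooProto second = FooProto.parseDelimitedFrom(in); // then the next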

Example 23 with DataInputBuffer

Use of org.apache.hadoop.io.DataInputBuffer in project hadoop by apache.

From class TestPBRecordImpl, method testLocalizerHeartbeatResponseSerDe.

@Test(timeout = 10000)
public void testLocalizerHeartbeatResponseSerDe() throws Exception {
    LocalizerHeartbeatResponse rsrcS = createLocalizerHeartbeatResponse();
    assertTrue(rsrcS instanceof LocalizerHeartbeatResponsePBImpl);
    LocalizerHeartbeatResponsePBImpl rsrcPb = (LocalizerHeartbeatResponsePBImpl) rsrcS;
    DataOutputBuffer out = new DataOutputBuffer();
    rsrcPb.getProto().writeDelimitedTo(out);
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), 0, out.getLength());
    LocalizerHeartbeatResponseProto rsrcPbD = LocalizerHeartbeatResponseProto.parseDelimitedFrom(in);
    assertNotNull(rsrcPbD);
    LocalizerHeartbeatResponse rsrcD = new LocalizerHeartbeatResponsePBImpl(rsrcPbD);
    assertEquals(rsrcS, rsrcD);
    assertEquals(createResource(), rsrcS.getResourceSpecs().get(0).getResource());
    assertEquals(createResource(), rsrcD.getResourceSpecs().get(0).getResource());
}
Also used: LocalizerHeartbeatResponseProto (org.apache.hadoop.yarn.proto.YarnServerNodemanagerServiceProtos.LocalizerHeartbeatResponseProto), DataInputBuffer (org.apache.hadoop.io.DataInputBuffer), DataOutputBuffer (org.apache.hadoop.io.DataOutputBuffer), LocalizerHeartbeatResponse (org.apache.hadoop.yarn.server.nodemanager.api.protocolrecords.LocalizerHeartbeatResponse), Test (org.junit.Test)

Example 24 with DataInputBuffer

Use of org.apache.hadoop.io.DataInputBuffer in project hadoop by apache.

From class TestContainerManagerSecurity, method testContainerTokenWithEpoch.

/**
   * Tests that a ContainerId is serialized and deserialized with its epoch preserved.
   *
   * @throws IOException
   * @throws InterruptedException
   * @throws YarnException
   */
private void testContainerTokenWithEpoch(Configuration conf) throws IOException, InterruptedException, YarnException {
    LOG.info("Running test for serializing/deserializing containerIds");
    NMTokenSecretManagerInRM nmTokenSecretManagerInRM = yarnCluster.getResourceManager().getRMContext().getNMTokenSecretManager();
    ApplicationId appId = ApplicationId.newInstance(1, 1);
    ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(appId, 0);
    // Container id 3 with epoch 5: the epoch lives in the bits above the low 40.
    ContainerId cId = ContainerId.newContainerId(appAttemptId, (5L << 40) | 3L);
    NodeManager nm = yarnCluster.getNodeManager(0);
    NMTokenSecretManagerInNM nmTokenSecretManagerInNM = nm.getNMContext().getNMTokenSecretManager();
    String user = "test";
    waitForNMToReceiveNMTokenKey(nmTokenSecretManagerInNM, nm);
    NodeId nodeId = nm.getNMContext().getNodeId();
    // Both key ids should be equal.
    Assert.assertEquals(nmTokenSecretManagerInNM.getCurrentKey().getKeyId(), nmTokenSecretManagerInRM.getCurrentKey().getKeyId());
    // Creating a normal Container Token
    RMContainerTokenSecretManager containerTokenSecretManager = yarnCluster.getResourceManager().getRMContext().getContainerTokenSecretManager();
    Resource r = Resource.newInstance(1230, 2);
    Token containerToken = containerTokenSecretManager.createContainerToken(cId, 0, nodeId, user, r, Priority.newInstance(0), 0);
    ContainerTokenIdentifier containerTokenIdentifier = new ContainerTokenIdentifier();
    byte[] tokenIdentifierContent = containerToken.getIdentifier().array();
    DataInputBuffer dib = new DataInputBuffer();
    // Two-argument reset(data, length) is equivalent to reset(data, 0, length).
    dib.reset(tokenIdentifierContent, tokenIdentifierContent.length);
    containerTokenIdentifier.readFields(dib);
    Assert.assertEquals(cId, containerTokenIdentifier.getContainerID());
    Assert.assertEquals(cId.toString(), containerTokenIdentifier.getContainerID().toString());
    Token nmToken = nmTokenSecretManagerInRM.createNMToken(appAttemptId, nodeId, user);
    YarnRPC rpc = YarnRPC.create(conf);
    testStartContainer(rpc, appAttemptId, nodeId, containerToken, nmToken, false);
    List<ContainerId> containerIds = new LinkedList<ContainerId>();
    containerIds.add(cId);
    ContainerManagementProtocol proxy = getContainerManagementProtocolProxy(rpc, nmToken, nodeId, user);
    GetContainerStatusesResponse res = proxy.getContainerStatuses(GetContainerStatusesRequest.newInstance(containerIds));
    Assert.assertNotNull(res.getContainerStatuses().get(0));
    Assert.assertEquals(cId, res.getContainerStatuses().get(0).getContainerId());
    Assert.assertEquals(cId.toString(), res.getContainerStatuses().get(0).getContainerId().toString());
}
Also used: Resource (org.apache.hadoop.yarn.api.records.Resource), NMTokenSecretManagerInNM (org.apache.hadoop.yarn.server.nodemanager.security.NMTokenSecretManagerInNM), InvalidToken (org.apache.hadoop.security.token.SecretManager.InvalidToken), Token (org.apache.hadoop.yarn.api.records.Token), ApplicationAttemptId (org.apache.hadoop.yarn.api.records.ApplicationAttemptId), YarnRPC (org.apache.hadoop.yarn.ipc.YarnRPC), NMTokenSecretManagerInRM (org.apache.hadoop.yarn.server.resourcemanager.security.NMTokenSecretManagerInRM), LinkedList (java.util.LinkedList), ContainerTokenIdentifier (org.apache.hadoop.yarn.security.ContainerTokenIdentifier), GetContainerStatusesResponse (org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesResponse), NodeManager (org.apache.hadoop.yarn.server.nodemanager.NodeManager), DataInputBuffer (org.apache.hadoop.io.DataInputBuffer), ContainerManagementProtocol (org.apache.hadoop.yarn.api.ContainerManagementProtocol), ContainerId (org.apache.hadoop.yarn.api.records.ContainerId), NodeId (org.apache.hadoop.yarn.api.records.NodeId), RMContainerTokenSecretManager (org.apache.hadoop.yarn.server.resourcemanager.security.RMContainerTokenSecretManager), ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId)
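The identifier-decoding steps above generalize to any Writable TokenIdentifier. A minimal sketch of the same pattern as a reusable helper (readContainerTokenIdentifier is our own name, not a Hadoop API):

// Rebuild a ContainerTokenIdentifier from a token's raw identifier bytes.
static ContainerTokenIdentifier readContainerTokenIdentifier(byte[] identifierBytes) throws IOException {
    ContainerTokenIdentifier id = new ContainerTokenIdentifier();
    DataInputBuffer dib = new DataInputBuffer();
    dib.reset(identifierBytes, identifierBytes.length);
    id.readFields(dib);
    return id;
}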

Example 25 with DataInputBuffer

Use of org.apache.hadoop.io.DataInputBuffer in project hive by apache.

From class LlapBaseInputFormat, method getRecordReader.

@SuppressWarnings("unchecked")
@Override
public RecordReader<NullWritable, V> getRecordReader(InputSplit split, JobConf job, Reporter reporter) throws IOException {
    LlapInputSplit llapSplit = (LlapInputSplit) split;
    // Set conf to use LLAP user rather than current user for LLAP Zk registry.
    HiveConf.setVar(job, HiveConf.ConfVars.LLAP_ZK_REGISTRY_USER, llapSplit.getLlapUser());
    SubmitWorkInfo submitWorkInfo = SubmitWorkInfo.fromBytes(llapSplit.getPlanBytes());
    ServiceInstance serviceInstance = getServiceInstance(job, llapSplit);
    String host = serviceInstance.getHost();
    int llapSubmitPort = serviceInstance.getRpcPort();
    LOG.info("Found service instance for host " + host + " with rpc port " + llapSubmitPort + " and outputformat port " + serviceInstance.getOutputFormatPort());
    byte[] llapTokenBytes = llapSplit.getTokenBytes();
    Token<LlapTokenIdentifier> llapToken = null;
    if (llapTokenBytes != null) {
        DataInputBuffer in = new DataInputBuffer();
        in.reset(llapTokenBytes, 0, llapTokenBytes.length);
        llapToken = new Token<LlapTokenIdentifier>();
        llapToken.readFields(in);
    }
    LlapRecordReaderTaskUmbilicalExternalResponder umbilicalResponder = new LlapRecordReaderTaskUmbilicalExternalResponder();
    LlapTaskUmbilicalExternalClient llapClient = new LlapTaskUmbilicalExternalClient(job, submitWorkInfo.getTokenIdentifier(), submitWorkInfo.getToken(), umbilicalResponder, llapToken);
    llapClient.init(job);
    llapClient.start();
    int attemptNum = 0;
    // Use task attempt number from conf if provided
    TaskAttemptID taskAttemptId = TaskAttemptID.forName(job.get(MRJobConfig.TASK_ATTEMPT_ID));
    if (taskAttemptId != null) {
        attemptNum = taskAttemptId.getId();
        if (LOG.isDebugEnabled()) {
            LOG.debug("Setting attempt number to " + attemptNum + " from task attempt ID in conf: " + job.get(MRJobConfig.TASK_ATTEMPT_ID));
        }
    }
    SubmitWorkRequestProto request = constructSubmitWorkRequestProto(submitWorkInfo, llapSplit.getSplitNum(), attemptNum, llapClient.getAddress(), submitWorkInfo.getToken(), llapSplit.getFragmentBytes(), llapSplit.getFragmentBytesSignature(), job);
    llapClient.submitWork(request, host, llapSubmitPort);
    Socket socket = new Socket(host, serviceInstance.getOutputFormatPort());
    LOG.debug("Socket connected");
    SignableVertexSpec vertex = SignableVertexSpec.parseFrom(submitWorkInfo.getVertexBinary());
    String fragmentId = Converters.createTaskAttemptId(vertex.getQueryIdentifier(), vertex.getVertexIndex(), request.getFragmentNumber(), request.getAttemptNumber()).toString();
    OutputStream socketStream = socket.getOutputStream();
    LlapOutputSocketInitMessage.Builder builder = LlapOutputSocketInitMessage.newBuilder().setFragmentId(fragmentId);
    if (llapSplit.getTokenBytes() != null) {
        builder.setToken(ByteString.copyFrom(llapSplit.getTokenBytes()));
    }
    builder.build().writeDelimitedTo(socketStream);
    socketStream.flush();
    LOG.info("Registered id: " + fragmentId);
    @SuppressWarnings("rawtypes") LlapBaseRecordReader recordReader = new LlapBaseRecordReader(socket.getInputStream(), llapSplit.getSchema(), Text.class, job, llapClient, (java.io.Closeable) socket);
    umbilicalResponder.setRecordReader(recordReader);
    return recordReader;
}
Also used: LlapTokenIdentifier (org.apache.hadoop.hive.llap.security.LlapTokenIdentifier), LlapOutputSocketInitMessage (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage), TaskAttemptID (org.apache.hadoop.mapreduce.TaskAttemptID), TezTaskAttemptID (org.apache.tez.dag.records.TezTaskAttemptID), OutputStream (java.io.OutputStream), ServiceInstance (org.apache.hadoop.hive.llap.registry.ServiceInstance), ByteString (com.google.protobuf.ByteString), LlapTaskUmbilicalExternalClient (org.apache.hadoop.hive.llap.ext.LlapTaskUmbilicalExternalClient), DataInputBuffer (org.apache.hadoop.io.DataInputBuffer), SubmitWorkRequestProto (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto), SignableVertexSpec (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SignableVertexSpec), Socket (java.net.Socket)
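The split carries the token as raw bytes; the inverse of the readFields step above is plain Writable serialization. A minimal sketch, with a helper name of our own choosing (tokenToBytes is not part of the LLAP API):

// Serialize a token to the byte form that the reader above consumes.
static byte[] tokenToBytes(Token<LlapTokenIdentifier> token) throws IOException {
    DataOutputBuffer out = new DataOutputBuffer();
    token.write(out); // Writable serialization, the counterpart of token.readFields(in)
    // getData() returns the whole backing array, so trim to the written length.
    return java.util.Arrays.copyOf(out.getData(), out.getLength());
}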

Aggregations

DataInputBuffer (org.apache.hadoop.io.DataInputBuffer): 68 uses
Test (org.junit.Test): 37 uses
DataOutputBuffer (org.apache.hadoop.io.DataOutputBuffer): 36 uses
IOException (java.io.IOException): 16 uses
Text (org.apache.hadoop.io.Text): 10 uses
BufferedInputStream (java.io.BufferedInputStream): 8 uses
DataInputStream (java.io.DataInputStream): 8 uses
Random (java.util.Random): 8 uses
Configuration (org.apache.hadoop.conf.Configuration): 7 uses
Path (org.apache.hadoop.fs.Path): 7 uses
DataOutputStream (java.io.DataOutputStream): 6 uses
BufferedOutputStream (java.io.BufferedOutputStream): 5 uses
ApplicationAttemptId (org.apache.hadoop.yarn.api.records.ApplicationAttemptId): 5 uses
FileSystem (org.apache.hadoop.fs.FileSystem): 4 uses
BytesWritable (org.apache.hadoop.io.BytesWritable): 4 uses
InputStream (java.io.InputStream): 3 uses
HashMap (java.util.HashMap): 3 uses
RandomDatum (org.apache.hadoop.io.RandomDatum): 3 uses
InvalidToken (org.apache.hadoop.security.token.SecretManager.InvalidToken): 3 uses
ContainerId (org.apache.hadoop.yarn.api.records.ContainerId): 3 uses
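
All of the examples above share one idiom: serialize into a DataOutputBuffer, then point a DataInputBuffer at the same backing array without copying. A minimal self-contained sketch of that round trip with a Text value:

import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;

DataOutputBuffer out = new DataOutputBuffer();
new Text("hello").write(out);
DataInputBuffer in = new DataInputBuffer();
// getData() may be longer than the valid content, so pass getLength() explicitly.
in.reset(out.getData(), 0, out.getLength());
Text roundTripped = new Text();
roundTripped.readFields(in);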