Example 6 with SubmitWorkRequestProto

Use of org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto in project hive by apache.

From class TestContainerRunnerImpl, method testSubmitSameFragment:

@Test(timeout = 10000)
public void testSubmitSameFragment() throws Exception {
    Credentials credentials = new Credentials();
    Token<LlapTokenIdentifier> sessionToken = new Token<>("identifier".getBytes(), "testPassword".getBytes(), new Text("kind"), new Text("service"));
    TokenCache.setSessionToken(sessionToken, credentials);
    RegisterDagRequestProto request = RegisterDagRequestProto.newBuilder()
        .setUser(testUser)
        .setCredentialsBinary(ByteString.copyFrom(LlapTezUtils.serializeCredentials(credentials)))
        .setQueryIdentifier(QueryIdentifierProto.newBuilder()
            .setApplicationIdString(appId)
            .setDagIndex(dagId)
            .build())
        .build();
    // Registering the DAG should register the application with the shuffle handler.
    containerRunner.registerDag(request);
    Assert.assertEquals(ShuffleHandler.get().getRegisteredApps().size(), 1);
    Assert.assertEquals(ShuffleHandler.get().getRegisteredApps().get(appId), dagId);
    Assert.assertEquals(ShuffleHandler.get().getRegisteredDirectories().size(), 0);
    int fragNum = 1;
    int attemptNum = 0;
    SubmitWorkRequestProto sRequest1 = LlapDaemonTestUtils.buildSubmitProtoRequest(fragNum, attemptNum, appId, dagId, vId, "dagName", 0, 0, 0, 0, 1, credentials);
    containerRunner.submitWork(sRequest1);
    Assert.assertEquals(ShuffleHandler.get().getRegisteredApps().size(), 1);
    Assert.assertEquals(ShuffleHandler.get().getRegisteredApps().get(appId), dagId);
    if (ShuffleHandler.get().isDirWatcherEnabled()) {
        Assert.assertEquals(ShuffleHandler.get().getRegisteredDirectories().size(), 1);
        Assert.assertEquals(ShuffleHandler.get().getRegisteredDirectories().get(appId), dagId);
    }
    // submitWork() was successful, should show up as an active fragment.
    Assert.assertEquals(1, containerRunner.getExecutorStatus().size());
    boolean caughtException = false;
    // Try exact same fragment ID + attempt number - should fail.
    try {
        SubmitWorkRequestProto sRequest2 = LlapDaemonTestUtils.buildSubmitProtoRequest(fragNum, attemptNum, appId, dagId, vId, "dagName", 0, 0, 0, 0, 1, credentials);
        containerRunner.submitWork(sRequest2);
    } catch (IllegalArgumentException err) {
        err.printStackTrace();
        caughtException = true;
    }
    Assert.assertTrue(caughtException);
    // request failed so should still only have the 1 fragment
    Assert.assertEquals(1, containerRunner.getExecutorStatus().size());
    // Try same fragment ID with different attempt number - should work.
    attemptNum = 1;
    SubmitWorkRequestProto sRequest3 = LlapDaemonTestUtils.buildSubmitProtoRequest(fragNum, attemptNum, appId, dagId, vId, "dagName", 0, 0, 0, 0, 1, credentials);
    containerRunner.submitWork(sRequest3);
    // Should now have 2 fragments registered.
    Assert.assertEquals(2, containerRunner.getExecutorStatus().size());
}
Also used: LlapTokenIdentifier (org.apache.hadoop.hive.llap.security.LlapTokenIdentifier), SubmitWorkRequestProto (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto), Token (org.apache.hadoop.security.token.Token), Text (org.apache.hadoop.io.Text), RegisterDagRequestProto (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto), Credentials (org.apache.hadoop.security.Credentials), Test (org.junit.Test)
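The assertions in this test hinge on how the daemon keys its active fragments: resubmitting the exact same fragment number and attempt number is rejected with an IllegalArgumentException, while a new attempt number is accepted as a separate fragment. The sketch below illustrates only that registry idea; it is not the actual ContainerRunnerImpl/QueryTracker code, and the FragmentKey class and registry are hypothetical (in the real daemon the identity also involves the query, but only fragment number and attempt number vary in this test).

import java.util.HashSet;
import java.util.Objects;
import java.util.Set;

public class FragmentRegistrySketch {

    // Hypothetical fragment identity: just the fields this test varies.
    static final class FragmentKey {
        final int fragmentNumber;
        final int attemptNumber;

        FragmentKey(int fragmentNumber, int attemptNumber) {
            this.fragmentNumber = fragmentNumber;
            this.attemptNumber = attemptNumber;
        }

        @Override
        public boolean equals(Object o) {
            if (!(o instanceof FragmentKey)) {
                return false;
            }
            FragmentKey other = (FragmentKey) o;
            return fragmentNumber == other.fragmentNumber && attemptNumber == other.attemptNumber;
        }

        @Override
        public int hashCode() {
            return Objects.hash(fragmentNumber, attemptNumber);
        }
    }

    private final Set<FragmentKey> active = new HashSet<>();

    // Mirrors the behavior the test asserts: an exact duplicate is rejected,
    // a different attempt number registers a new active fragment.
    void submit(int fragmentNumber, int attemptNumber) {
        FragmentKey key = new FragmentKey(fragmentNumber, attemptNumber);
        if (!active.add(key)) {
            throw new IllegalArgumentException(
                "Fragment already registered: fragment=" + fragmentNumber + ", attempt=" + attemptNumber);
        }
    }

    int activeCount() {
        return active.size();
    }

    public static void main(String[] args) {
        FragmentRegistrySketch registry = new FragmentRegistrySketch();
        registry.submit(1, 0);          // first submission succeeds
        try {
            registry.submit(1, 0);      // same fragment + attempt: rejected
        } catch (IllegalArgumentException expected) {
            System.out.println("duplicate rejected: " + expected.getMessage());
        }
        registry.submit(1, 1);          // new attempt number: accepted
        System.out.println("active fragments: " + registry.activeCount()); // prints 2
    }
}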

Example 7 with SubmitWorkRequestProto

Use of org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto in project hive by apache.

From class TestContainerRunnerImpl, method testRegisterDag:

@Test(timeout = 10000)
public void testRegisterDag() throws Exception {
    Credentials credentials = new Credentials();
    Token<LlapTokenIdentifier> sessionToken = new Token<>("identifier".getBytes(), "testPassword".getBytes(), new Text("kind"), new Text("service"));
    TokenCache.setSessionToken(sessionToken, credentials);
    RegisterDagRequestProto request = RegisterDagRequestProto.newBuilder()
        .setUser(testUser)
        .setCredentialsBinary(ByteString.copyFrom(LlapTezUtils.serializeCredentials(credentials)))
        .setQueryIdentifier(QueryIdentifierProto.newBuilder()
            .setApplicationIdString(appId)
            .setDagIndex(dagId)
            .build())
        .build();
    // Registering the DAG should register the application with the shuffle handler.
    containerRunner.registerDag(request);
    Assert.assertEquals(ShuffleHandler.get().getRegisteredApps().size(), 1);
    Assert.assertEquals(ShuffleHandler.get().getRegisteredApps().get(appId), dagId);
    Assert.assertEquals(ShuffleHandler.get().getRegisteredDirectories().size(), 0);
    // Re-registering the same DAG should be a no-op: the counts below stay unchanged.
    containerRunner.registerDag(request);
    Assert.assertEquals(ShuffleHandler.get().getRegisteredApps().size(), 1);
    Assert.assertEquals(ShuffleHandler.get().getRegisteredApps().get(appId), dagId);
    Assert.assertEquals(ShuffleHandler.get().getRegisteredDirectories().size(), 0);
    // Submitting a fragment for the registered DAG should register its directories
    // with the shuffle handler when the directory watcher is enabled.
    SubmitWorkRequestProto sRequest = LlapDaemonTestUtils.buildSubmitProtoRequest(1, appId, dagId, vId, "dagName", 0, 0, 0, 0, 1, credentials);
    containerRunner.submitWork(sRequest);
    Assert.assertEquals(ShuffleHandler.get().getRegisteredApps().size(), 1);
    Assert.assertEquals(ShuffleHandler.get().getRegisteredApps().get(appId), dagId);
    if (ShuffleHandler.get().isDirWatcherEnabled()) {
        Assert.assertEquals(ShuffleHandler.get().getRegisteredDirectories().size(), 1);
        Assert.assertEquals(ShuffleHandler.get().getRegisteredDirectories().get(appId), dagId);
    }
}
Also used: LlapTokenIdentifier (org.apache.hadoop.hive.llap.security.LlapTokenIdentifier), SubmitWorkRequestProto (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto), Token (org.apache.hadoop.security.token.Token), Text (org.apache.hadoop.io.Text), RegisterDagRequestProto (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto), Credentials (org.apache.hadoop.security.Credentials), Test (org.junit.Test)
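Both examples pack the Hadoop Credentials into the request's credentials binary via LlapTezUtils.serializeCredentials. The exact implementation of that helper is not shown on this page; the sketch below is only an assumed equivalent built on the public Hadoop Credentials API (writeTokenStorageToStream), shown for context rather than as the actual LlapTezUtils code.

import java.io.IOException;

import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.security.Credentials;

import com.google.protobuf.ByteString;

public final class CredentialsSerializationSketch {

    private CredentialsSerializationSketch() {
    }

    // Serializes Credentials into a ByteString suitable for a protobuf bytes field,
    // using Hadoop's standard token-storage format. Assumed equivalent of
    // LlapTezUtils.serializeCredentials, not the actual implementation.
    public static ByteString toByteString(Credentials credentials) throws IOException {
        DataOutputBuffer out = new DataOutputBuffer();
        credentials.writeTokenStorageToStream(out);
        // The backing array can be longer than the written data; copy only getLength() bytes.
        return ByteString.copyFrom(out.getData(), 0, out.getLength());
    }
}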

Aggregations

SubmitWorkRequestProto (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.SubmitWorkRequestProto): 7
ByteString (com.google.protobuf.ByteString): 4
Credentials (org.apache.hadoop.security.Credentials): 4
ByteBuffer (java.nio.ByteBuffer): 3
FragmentRuntimeInfo (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.FragmentRuntimeInfo): 3
LlapTokenIdentifier (org.apache.hadoop.hive.llap.security.LlapTokenIdentifier): 3
RegisterDagRequestProto (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.RegisterDagRequestProto): 2
VertexOrBinary (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.VertexOrBinary): 2
Text (org.apache.hadoop.io.Text): 2
Token (org.apache.hadoop.security.token.Token): 2
ApplicationId (org.apache.hadoop.yarn.api.records.ApplicationId): 2
ContainerId (org.apache.hadoop.yarn.api.records.ContainerId): 2
Test (org.junit.Test): 2
ServiceException (com.google.protobuf.ServiceException): 1
IOException (java.io.IOException): 1
OutputStream (java.io.OutputStream): 1
BindException (java.net.BindException): 1
Socket (java.net.Socket): 1
LlapNodeId (org.apache.hadoop.hive.llap.LlapNodeId): 1
LlapOutputSocketInitMessage (org.apache.hadoop.hive.llap.daemon.rpc.LlapDaemonProtocolProtos.LlapOutputSocketInitMessage): 1