Search in sources:

Example 56 with DataInputBuffer

Use of org.apache.hadoop.io.DataInputBuffer in the project SQLWindowing by hbutani.

From the class DataType, the method cloneInstance:

public T cloneInstance(T instance) throws IOException {
    // Deep-copy a Writable by round-tripping it through the thread-local
    // serialization buffers: write the instance out, then read a fresh
    // instance back from the same bytes.
    DataOutputBuffer writeBuf = serializeBuffers.get();
    writeBuf.reset();
    instance.write(writeBuf);

    byte[] serialized = writeBuf.getData();
    DataInputBuffer readBuf = materializeBuffers.get();
    readBuf.reset(serialized, 0, serialized.length);

    // create() supplies a brand-new empty instance to deserialize into.
    T copy = create();
    copy.readFields(readBuf);
    return copy;
}
Also used : DataInputBuffer(org.apache.hadoop.io.DataInputBuffer) DataOutputBuffer(org.apache.hadoop.io.DataOutputBuffer)

Example 57 with DataInputBuffer

Use of org.apache.hadoop.io.DataInputBuffer in the project hadoop by apache.

From the class TestYARNTokenIdentifier, the method testRMDelegationTokenIdentifier:

@Test
public void testRMDelegationTokenIdentifier() throws IOException {
    Text owner = new Text("user1");
    Text renewer = new Text("user2");
    Text realUser = new Text("user3");
    long issueDate = 1;
    long maxDate = 2;
    int sequenceNumber = 3;
    int masterKeyId = 4;
    // Round-trip a fully-populated token through its Writable serialization
    // and verify every field survives.
    RMDelegationTokenIdentifier token = new RMDelegationTokenIdentifier(owner, renewer, realUser);
    token.setIssueDate(issueDate);
    token.setMaxDate(maxDate);
    token.setSequenceNumber(sequenceNumber);
    token.setMasterKeyId(masterKeyId);
    RMDelegationTokenIdentifier anotherToken = new RMDelegationTokenIdentifier();
    byte[] tokenContent = token.getBytes();
    // try-with-resources guarantees the buffer is closed even if readFields throws.
    try (DataInputBuffer dib = new DataInputBuffer()) {
        dib.reset(tokenContent, tokenContent.length);
        anotherToken.readFields(dib);
    }
    // verify the whole record equals with original record
    // (JUnit convention: assertEquals(message, expected, actual))
    Assert.assertEquals("Token is not the same after serialization " + "and deserialization.", token, anotherToken);
    Assert.assertEquals("owner from proto is not the same with original token", owner, anotherToken.getOwner());
    Assert.assertEquals("renewer from proto is not the same with original token", renewer, anotherToken.getRenewer());
    Assert.assertEquals("realUser from proto is not the same with original token", realUser, anotherToken.getRealUser());
    Assert.assertEquals("issueDate from proto is not the same with original token", issueDate, anotherToken.getIssueDate());
    Assert.assertEquals("maxDate from proto is not the same with original token", maxDate, anotherToken.getMaxDate());
    Assert.assertEquals("sequenceNumber from proto is not the same with original token", sequenceNumber, anotherToken.getSequenceNumber());
    Assert.assertEquals("masterKeyId from proto is not the same with original token", masterKeyId, anotherToken.getMasterKeyId());
    // Test getProto: serialize via the protobuf representation and read it back.
    RMDelegationTokenIdentifier token1 = new RMDelegationTokenIdentifier(owner, renewer, realUser);
    token1.setIssueDate(issueDate);
    token1.setMaxDate(maxDate);
    token1.setSequenceNumber(sequenceNumber);
    token1.setMasterKeyId(masterKeyId);
    YARNDelegationTokenIdentifierProto tokenProto = token1.getProto();
    // Write token proto to stream; the try-with-resources closes (and thus
    // flushes) the stream before the byte array is captured.
    ByteArrayOutputStream baos = new ByteArrayOutputStream();
    byte[] tokenData;
    try (DataOutputStream out = new DataOutputStream(baos)) {
        tokenProto.writeTo(out);
    }
    tokenData = baos.toByteArray();
    // Read token
    RMDelegationTokenIdentifier readToken = new RMDelegationTokenIdentifier();
    try (DataInputBuffer db = new DataInputBuffer()) {
        db.reset(tokenData, tokenData.length);
        readToken.readFields(db);
    }
    // Verify if read token equals with original token
    Assert.assertEquals("Token from getProto is not the same after " + "serialization and deserialization.", token1, readToken);
}
Also used : DataInputBuffer(org.apache.hadoop.io.DataInputBuffer) DataOutputStream(java.io.DataOutputStream) Text(org.apache.hadoop.io.Text) RMDelegationTokenIdentifier(org.apache.hadoop.yarn.security.client.RMDelegationTokenIdentifier) ByteArrayOutputStream(java.io.ByteArrayOutputStream) YARNDelegationTokenIdentifierProto(org.apache.hadoop.yarn.proto.YarnSecurityTokenProtos.YARNDelegationTokenIdentifierProto) Test(org.junit.Test)

Example 58 with DataInputBuffer

Use of org.apache.hadoop.io.DataInputBuffer in the project hadoop by apache.

From the class TestYARNTokenIdentifier, the method testClientToAMTokenIdentifier:

@Test
public void testClientToAMTokenIdentifier() throws IOException {
    ApplicationAttemptId appAttemptId = ApplicationAttemptId.newInstance(ApplicationId.newInstance(1, 1), 1);
    String clientName = "user";
    // Round-trip the token through its Writable serialization.
    ClientToAMTokenIdentifier token = new ClientToAMTokenIdentifier(appAttemptId, clientName);
    ClientToAMTokenIdentifier anotherToken = new ClientToAMTokenIdentifier();
    byte[] tokenContent = token.getBytes();
    // Close the buffer even on failure; the original leaked it, unlike the
    // sibling tests which close theirs explicitly.
    try (DataInputBuffer dib = new DataInputBuffer()) {
        dib.reset(tokenContent, tokenContent.length);
        anotherToken.readFields(dib);
    }
    // verify the whole record equals with original record
    // (JUnit convention: assertEquals(message, expected, actual))
    Assert.assertEquals("Token is not the same after serialization " + "and deserialization.", token, anotherToken);
    Assert.assertEquals("ApplicationAttemptId from proto is not the same with original token", appAttemptId, anotherToken.getApplicationAttemptID());
    Assert.assertEquals("clientName from proto is not the same with original token", clientName, anotherToken.getClientName());
}
Also used : ClientToAMTokenIdentifier(org.apache.hadoop.yarn.security.client.ClientToAMTokenIdentifier) DataInputBuffer(org.apache.hadoop.io.DataInputBuffer) ApplicationAttemptId(org.apache.hadoop.yarn.api.records.ApplicationAttemptId) Test(org.junit.Test)

Example 59 with DataInputBuffer

Use of org.apache.hadoop.io.DataInputBuffer in the project hadoop by apache.

From the class TestYARNTokenIdentifier, the method testContainerTokenIdentifier:

@Test
public void testContainerTokenIdentifier() throws IOException {
    ContainerId containerID = ContainerId.newContainerId(ApplicationAttemptId.newInstance(ApplicationId.newInstance(1, 1), 1), 1);
    String hostName = "host0";
    String appSubmitter = "usr0";
    Resource r = Resource.newInstance(1024, 1);
    long expiryTimeStamp = 1000;
    int masterKeyId = 1;
    long rmIdentifier = 1;
    Priority priority = Priority.newInstance(1);
    long creationTime = 1000;
    // Round-trip a fully-populated container token and verify every field,
    // plus the defaults applied to fields not set by this constructor.
    ContainerTokenIdentifier token = new ContainerTokenIdentifier(containerID, hostName, appSubmitter, r, expiryTimeStamp, masterKeyId, rmIdentifier, priority, creationTime);
    ContainerTokenIdentifier anotherToken = new ContainerTokenIdentifier();
    byte[] tokenContent = token.getBytes();
    // try-with-resources closes the buffer even if readFields throws
    // (the original never closed it).
    try (DataInputBuffer dib = new DataInputBuffer()) {
        dib.reset(tokenContent, tokenContent.length);
        anotherToken.readFields(dib);
    }
    // verify the whole record equals with original record
    // (JUnit convention: assertEquals(message, expected, actual))
    Assert.assertEquals("Token is not the same after serialization " + "and deserialization.", token, anotherToken);
    Assert.assertEquals("ContainerID from proto is not the same with original token", containerID, anotherToken.getContainerID());
    Assert.assertEquals("Hostname from proto is not the same with original token", hostName, anotherToken.getNmHostAddress());
    Assert.assertEquals("ApplicationSubmitter from proto is not the same with original token", appSubmitter, anotherToken.getApplicationSubmitter());
    Assert.assertEquals("Resource from proto is not the same with original token", r, anotherToken.getResource());
    Assert.assertEquals("expiryTimeStamp from proto is not the same with original token", expiryTimeStamp, anotherToken.getExpiryTimeStamp());
    Assert.assertEquals("KeyId from proto is not the same with original token", masterKeyId, anotherToken.getMasterKeyId());
    Assert.assertEquals("RMIdentifier from proto is not the same with original token", rmIdentifier, anotherToken.getRMIdentifier());
    Assert.assertEquals("Priority from proto is not the same with original token", priority, anotherToken.getPriority());
    Assert.assertEquals("CreationTime from proto is not the same with original token", creationTime, anotherToken.getCreationTime());
    // Fields not covered by this constructor fall back to their defaults.
    Assert.assertNull(anotherToken.getLogAggregationContext());
    Assert.assertEquals(CommonNodeLabelsManager.NO_LABEL, anotherToken.getNodeLabelExpression());
    Assert.assertEquals(ContainerType.TASK, anotherToken.getContainerType());
    Assert.assertEquals(ExecutionType.GUARANTEED, anotherToken.getExecutionType());
}
Also used : DataInputBuffer(org.apache.hadoop.io.DataInputBuffer) ContainerId(org.apache.hadoop.yarn.api.records.ContainerId) Priority(org.apache.hadoop.yarn.api.records.Priority) Resource(org.apache.hadoop.yarn.api.records.Resource) Test(org.junit.Test)

Example 60 with DataInputBuffer

Use of org.apache.hadoop.io.DataInputBuffer in the project hadoop by apache.

From the class TestYARNTokenIdentifier, the method testAMContainerTokenIdentifier:

@Test
public void testAMContainerTokenIdentifier() throws IOException {
    ContainerId containerID = ContainerId.newContainerId(ApplicationAttemptId.newInstance(ApplicationId.newInstance(1, 1), 1), 1);
    String hostName = "host0";
    String appSubmitter = "usr0";
    Resource r = Resource.newInstance(1024, 1);
    long expiryTimeStamp = 1000;
    int masterKeyId = 1;
    long rmIdentifier = 1;
    Priority priority = Priority.newInstance(1);
    long creationTime = 1000;
    // First round-trip: APPLICATION_MASTER container type, default execution type.
    ContainerTokenIdentifier token = new ContainerTokenIdentifier(containerID, hostName, appSubmitter, r, expiryTimeStamp, masterKeyId, rmIdentifier, priority, creationTime, null, CommonNodeLabelsManager.NO_LABEL, ContainerType.APPLICATION_MASTER);
    ContainerTokenIdentifier anotherToken = new ContainerTokenIdentifier();
    byte[] tokenContent = token.getBytes();
    // try-with-resources closes each buffer even if readFields throws
    // (the original never closed either buffer).
    try (DataInputBuffer dib = new DataInputBuffer()) {
        dib.reset(tokenContent, tokenContent.length);
        anotherToken.readFields(dib);
    }
    Assert.assertEquals(ContainerType.APPLICATION_MASTER, anotherToken.getContainerType());
    Assert.assertEquals(ExecutionType.GUARANTEED, anotherToken.getExecutionType());
    // Second round-trip: TASK container type with an explicit OPPORTUNISTIC
    // execution type.
    token = new ContainerTokenIdentifier(containerID, 0, hostName, appSubmitter, r, expiryTimeStamp, masterKeyId, rmIdentifier, priority, creationTime, null, CommonNodeLabelsManager.NO_LABEL, ContainerType.TASK, ExecutionType.OPPORTUNISTIC);
    anotherToken = new ContainerTokenIdentifier();
    tokenContent = token.getBytes();
    try (DataInputBuffer dib = new DataInputBuffer()) {
        dib.reset(tokenContent, tokenContent.length);
        anotherToken.readFields(dib);
    }
    Assert.assertEquals(ContainerType.TASK, anotherToken.getContainerType());
    Assert.assertEquals(ExecutionType.OPPORTUNISTIC, anotherToken.getExecutionType());
}
Also used : DataInputBuffer(org.apache.hadoop.io.DataInputBuffer) ContainerId(org.apache.hadoop.yarn.api.records.ContainerId) Priority(org.apache.hadoop.yarn.api.records.Priority) Resource(org.apache.hadoop.yarn.api.records.Resource) Test(org.junit.Test)

Aggregations

DataInputBuffer (org.apache.hadoop.io.DataInputBuffer)112 Test (org.junit.Test)49 DataOutputBuffer (org.apache.hadoop.io.DataOutputBuffer)45 IOException (java.io.IOException)24 Text (org.apache.hadoop.io.Text)20 Path (org.apache.hadoop.fs.Path)16 Configuration (org.apache.hadoop.conf.Configuration)13 IntWritable (org.apache.hadoop.io.IntWritable)11 Random (java.util.Random)10 DataInputStream (java.io.DataInputStream)9 BufferedInputStream (java.io.BufferedInputStream)8 HashMap (java.util.HashMap)8 DataOutputStream (java.io.DataOutputStream)6 LongWritable (org.apache.hadoop.io.LongWritable)6 SerializationFactory (org.apache.hadoop.io.serializer.SerializationFactory)6 IFile (org.apache.tez.runtime.library.common.sort.impl.IFile)6 BufferedOutputStream (java.io.BufferedOutputStream)5 BytesWritable (org.apache.hadoop.io.BytesWritable)5 FSDataInputStream (org.apache.hadoop.fs.FSDataInputStream)4 Credentials (org.apache.hadoop.security.Credentials)4