Use of org.apache.hadoop.io.DataOutputByteBuffer in project hadoop by apache.
In the class TestTaskID, method testWrite.
/**
 * Test of the write method of class TaskID.
 */
@Test
public void testWrite() throws Exception {
    JobID jobId = new JobID("1234", 1);
    TaskID taskId = new TaskID(jobId, TaskType.JOB_SETUP, 0);
    DataOutputByteBuffer out = new DataOutputByteBuffer();

    taskId.write(out);

    DataInputByteBuffer in = new DataInputByteBuffer();
    byte[] buffer = new byte[4];

    in.reset(out.getData());

    assertEquals("The write() method did not write the expected task ID",
        0, in.readInt());
    assertEquals("The write() method did not write the expected job ID",
        1, in.readInt());
    assertEquals("The write() method did not write the expected job identifier length",
        4, WritableUtils.readVInt(in));

    in.readFully(buffer, 0, 4);

    assertEquals("The write() method did not write the expected job identifier",
        "1234", new String(buffer));
    assertEquals("The write() method did not write the expected task type",
        TaskType.JOB_SETUP, WritableUtils.readEnum(in, TaskType.class));
}
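The pattern this test relies on is general: DataOutputByteBuffer collects whatever a Writable's write() emits into in-memory ByteBuffers, and DataInputByteBuffer.reset() turns those buffers back into a DataInput for reading. A minimal sketch of that round trip with a Text value (the class name ByteBufferRoundTrip and the standalone main are our own illustration, not part of the test):

import org.apache.hadoop.io.DataInputByteBuffer;
import org.apache.hadoop.io.DataOutputByteBuffer;
import org.apache.hadoop.io.Text;

public class ByteBufferRoundTrip {
    public static void main(String[] args) throws Exception {
        // Serialize a Writable into in-memory ByteBuffers.
        DataOutputByteBuffer out = new DataOutputByteBuffer();
        new Text("hello").write(out);

        // Feed the written buffers back in and deserialize.
        DataInputByteBuffer in = new DataInputByteBuffer();
        in.reset(out.getData());
        Text copy = new Text();
        copy.readFields(in);

        System.out.println(copy); // prints: hello
    }
}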
Use of org.apache.hadoop.io.DataOutputByteBuffer in project apex-core by apache.
In the class StreamingContainerManagerTest, method testDeployInfoSerialization.
@Test
public void testDeployInfoSerialization() throws Exception {
    OperatorDeployInfo ndi = new OperatorDeployInfo();
    ndi.name = "node1";
    ndi.type = OperatorDeployInfo.OperatorType.GENERIC;
    ndi.id = 1;
    ndi.contextAttributes = new com.datatorrent.api.Attribute.AttributeMap.DefaultAttributeMap();
    ndi.contextAttributes.put(OperatorContext.SPIN_MILLIS, 100);

    OperatorDeployInfo.InputDeployInfo input = new OperatorDeployInfo.InputDeployInfo();
    input.declaredStreamId = "streamToNode";
    input.portName = "inputPortNameOnNode";
    input.sourceNodeId = 99;
    ndi.inputs = new ArrayList<>();
    ndi.inputs.add(input);

    OperatorDeployInfo.OutputDeployInfo output = new OperatorDeployInfo.OutputDeployInfo();
    output.declaredStreamId = "streamFromNode";
    output.portName = "outputPortNameOnNode";
    ndi.outputs = new ArrayList<>();
    ndi.outputs.add(output);

    ContainerHeartbeatResponse scc = new ContainerHeartbeatResponse();
    scc.deployRequest = Collections.singletonList(ndi);

    DataOutputByteBuffer out = new DataOutputByteBuffer();
    scc.write(out);

    DataInputByteBuffer in = new DataInputByteBuffer();
    in.reset(out.getData());

    ContainerHeartbeatResponse clone = new ContainerHeartbeatResponse();
    clone.readFields(in);

    Assert.assertNotNull(clone.deployRequest);
    Assert.assertEquals(1, clone.deployRequest.size());
    OperatorDeployInfo ndiClone = clone.deployRequest.get(0);
    Assert.assertEquals("name", ndi.name, ndiClone.name);
    Assert.assertEquals("type", ndi.type, ndiClone.type);

    String nodeToString = ndi.toString();
    Assert.assertTrue(nodeToString.contains(input.portName));
    Assert.assertTrue(nodeToString.contains(output.portName));

    Assert.assertEquals("contextAttributes " + ndiClone.contextAttributes,
        Integer.valueOf(100), ndiClone.contextAttributes.get(OperatorContext.SPIN_MILLIS));
}
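The apex-core test uses the same buffer pair to deep-clone a Writable through its own serialization, which is a convenient way to verify write()/readFields() symmetry on any message type. A hedged sketch of that clone step as a reusable helper (the class and method names WritableClones/roundTrip are ours, not apex-core API):

import java.io.IOException;
import org.apache.hadoop.io.DataInputByteBuffer;
import org.apache.hadoop.io.DataOutputByteBuffer;
import org.apache.hadoop.io.Writable;

public final class WritableClones {
    // Serialize 'original' and read it back into 'empty', returning the copy.
    // Mirrors the ContainerHeartbeatResponse clone step in the test above.
    public static <T extends Writable> T roundTrip(T original, T empty) throws IOException {
        DataOutputByteBuffer out = new DataOutputByteBuffer();
        original.write(out);
        DataInputByteBuffer in = new DataInputByteBuffer();
        in.reset(out.getData());
        empty.readFields(in);
        return empty;
    }
}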
Use of org.apache.hadoop.io.DataOutputByteBuffer in project hadoop by apache.
In the class TestTaskID, method testReadFields.
/**
 * Test of the readFields method of class TaskID.
 */
@Test
public void testReadFields() throws Exception {
    DataOutputByteBuffer out = new DataOutputByteBuffer();

    out.writeInt(0);
    out.writeInt(1);
    WritableUtils.writeVInt(out, 4);
    out.write(new byte[] { 0x31, 0x32, 0x33, 0x34 });
    WritableUtils.writeEnum(out, TaskType.REDUCE);

    DataInputByteBuffer in = new DataInputByteBuffer();
    in.reset(out.getData());

    TaskID instance = new TaskID();
    instance.readFields(in);

    assertEquals("The readFields() method did not produce the expected task ID",
        "task_1234_0001_r_000000", instance.toString());
}
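The two TaskID tests exercise opposite directions of the same wire format: the task number as an int, then the embedded JobID (job number int plus a VInt-length-prefixed jtIdentifier, here the four ASCII bytes "1234"), then the TaskType enum. A minimal sketch that round-trips a TaskID through both methods to show the directions agree (the identifiers come from the tests above; the class TaskIDRoundTrip and its main are our own illustration):

import org.apache.hadoop.io.DataInputByteBuffer;
import org.apache.hadoop.io.DataOutputByteBuffer;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.TaskID;
import org.apache.hadoop.mapreduce.TaskType;

public class TaskIDRoundTrip {
    public static void main(String[] args) throws Exception {
        // Same identifiers as the tests above: jtIdentifier "1234", job 1, task 0.
        TaskID original = new TaskID(new JobID("1234", 1), TaskType.REDUCE, 0);

        // write() emits: 0 (task), 1 (job), VInt 4 + "1234", enum REDUCE.
        DataOutputByteBuffer out = new DataOutputByteBuffer();
        original.write(out);

        // readFields() consumes the same layout back into a fresh instance.
        DataInputByteBuffer in = new DataInputByteBuffer();
        in.reset(out.getData());
        TaskID copy = new TaskID();
        copy.readFields(in);

        // Both should print task_1234_0001_r_000000.
        System.out.println(original);
        System.out.println(copy);
    }
}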