Use of org.apache.hadoop.io.DataInputByteBuffer in project tez by apache.
From the class TezRuntimeUtils, method deserializeShuffleProviderMetaData.
public static int deserializeShuffleProviderMetaData(ByteBuffer meta) throws IOException {
  DataInputByteBuffer in = new DataInputByteBuffer();
  try {
    in.reset(meta);
    int port = in.readInt();
    return port;
  } finally {
    in.close();
  }
}
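The metadata consumed here is just a serialized int holding the shuffle port. For context, a minimal sketch of the producer side could look like the following; the method name is hypothetical and not necessarily the one Tez uses, and it assumes the port is the only field in the metadata.
// Hypothetical producer-side sketch: serialize a shuffle port into a ByteBuffer that
// deserializeShuffleProviderMetaData can read back. Requires java.nio.ByteBuffer and
// org.apache.hadoop.io.DataOutputBuffer.
public static ByteBuffer serializeShuffleProviderMetaData(int port) throws IOException {
  DataOutputBuffer out = new DataOutputBuffer();
  try {
    out.writeInt(port); // matches the readInt() on the consumer side
    return ByteBuffer.wrap(out.getData(), 0, out.getLength());
  } finally {
    out.close();
  }
}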
Use of org.apache.hadoop.io.DataInputByteBuffer in project tez by apache.
From the class DagTypeConverters, method convertByteStringToCredentials.
public static Credentials convertByteStringToCredentials(ByteString byteString) {
  if (byteString == null) {
    return null;
  }
  DataInputByteBuffer dib = new DataInputByteBuffer();
  dib.reset(byteString.asReadOnlyByteBuffer());
  Credentials credentials = new Credentials();
  try {
    credentials.readTokenStorageStream(dib);
    return credentials;
  } catch (IOException e) {
    throw new TezUncheckedException("Failed to deserialize Credentials", e);
  }
}
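The reverse direction is the standard Hadoop token-serialization pattern. A hedged sketch follows; the method name is ours, not necessarily the one DagTypeConverters exposes.
// Hypothetical inverse of the converter above: pack Credentials into a protobuf ByteString.
// Requires com.google.protobuf.ByteString, org.apache.hadoop.io.DataOutputBuffer and
// org.apache.hadoop.security.Credentials.
public static ByteString convertCredentialsToByteString(Credentials credentials) {
  if (credentials == null) {
    return null;
  }
  try {
    DataOutputBuffer dob = new DataOutputBuffer();
    credentials.writeTokenStorageToStream(dob); // counterpart of readTokenStorageStream
    return ByteString.copyFrom(dob.getData(), 0, dob.getLength());
  } catch (IOException e) {
    throw new TezUncheckedException("Failed to serialize Credentials", e);
  }
}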
Use of org.apache.hadoop.io.DataInputByteBuffer in project tez by apache.
From the class TestMockDAGAppMaster, method testCountersAggregation.
@Test
public void testCountersAggregation() throws Exception {
  TezConfiguration tezconf = new TezConfiguration(defaultConf);
  MockTezClient tezClient =
      new MockTezClient("testMockAM", tezconf, true, null, null, null, null, false, false);
  tezClient.start();
  final String vAName = "A";
  final String vBName = "B";
  final String procCounterName = "Proc";
  final String globalCounterName = "Global";
  DAG dag = DAG.create("testCountersAggregation");
  Vertex vA = Vertex.create(vAName, ProcessorDescriptor.create("Proc.class"), 10);
  Vertex vB = Vertex.create(vBName, ProcessorDescriptor.create("Proc.class"), 1);
  dag.addVertex(vA).addVertex(vB).addEdge(
      Edge.create(vA, vB,
          EdgeProperty.create(DataMovementType.SCATTER_GATHER, DataSourceType.PERSISTED,
              SchedulingType.SEQUENTIAL, OutputDescriptor.create("Out"), InputDescriptor.create("In"))));
  TezCounters temp = new TezCounters();
  temp.findCounter(new String(globalCounterName), new String(globalCounterName)).increment(1);
  ByteArrayOutputStream bos = new ByteArrayOutputStream();
  DataOutput out = new DataOutputStream(bos);
  temp.write(out);
  final byte[] payload = bos.toByteArray();
  MockDAGAppMaster mockApp = tezClient.getLocalClient().getMockApp();
  MockContainerLauncher mockLauncher = mockApp.getContainerLauncher();
  mockLauncher.startScheduling(false);
  mockApp.countersDelegate = new CountersDelegate() {
    int counterValue = 0;

    @Override
    public TezCounters getCounters(TaskSpec taskSpec) {
      String vName = taskSpec.getVertexName();
      TezCounters counters = new TezCounters();
      final DataInputByteBuffer in = new DataInputByteBuffer();
      in.reset(ByteBuffer.wrap(payload));
      try {
        // this ensures that the serde code path is covered.
        // the internal merges of counters cover the constructor code path.
        counters.readFields(in);
      } catch (IOException e) {
        Assert.fail(e.getMessage());
      }
      counters.findCounter(vName, procCounterName).setValue(++counterValue);
      for (OutputSpec output : taskSpec.getOutputs()) {
        counters.findCounter(vName, output.getDestinationVertexName()).setValue(++counterValue);
      }
      for (InputSpec input : taskSpec.getInputs()) {
        counters.findCounter(vName, input.getSourceVertexName()).setValue(++counterValue);
      }
      return counters;
    }
  };
  mockApp.doSleep = false;
  DAGClient dagClient = tezClient.submitDAG(dag);
  mockLauncher.waitTillContainersLaunched();
  DAGImpl dagImpl = (DAGImpl) mockApp.getContext().getCurrentDAG();
  mockLauncher.startScheduling(true);
  DAGStatus status = dagClient.waitForCompletion();
  Assert.assertEquals(DAGStatus.State.SUCCEEDED, status.getState());
  TezCounters counters = dagImpl.getAllCounters();
  // verify processor counters
  VertexImpl vAImpl = (VertexImpl) dagImpl.getVertex(vAName);
  VertexImpl vBImpl = (VertexImpl) dagImpl.getVertex(vBName);
  TezCounters vACounters = vAImpl.getAllCounters();
  TezCounters vBCounters = vBImpl.getAllCounters();
  Assert.assertEquals(19, ((AggregateTezCounterDelegate) vACounters.findCounter(vAName, procCounterName)).getMax());
  Assert.assertEquals(1, ((AggregateTezCounterDelegate) vACounters.findCounter(vAName, procCounterName)).getMin());
  Assert.assertEquals(20, ((AggregateTezCounterDelegate) vACounters.findCounter(vAName, vBName)).getMax());
  Assert.assertEquals(2, ((AggregateTezCounterDelegate) vACounters.findCounter(vAName, vBName)).getMin());
  Assert.assertEquals(21, ((AggregateTezCounterDelegate) vBCounters.findCounter(vBName, procCounterName)).getMin());
  Assert.assertEquals(21, ((AggregateTezCounterDelegate) vBCounters.findCounter(vBName, procCounterName)).getMax());
  Assert.assertEquals(22, ((AggregateTezCounterDelegate) vBCounters.findCounter(vBName, vAName)).getMin());
  Assert.assertEquals(22, ((AggregateTezCounterDelegate) vBCounters.findCounter(vBName, vAName)).getMax());
  tezClient.stop();
}
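The delegate above exercises TezCounters serialization by replaying one pre-serialized payload through DataInputByteBuffer on every call. In isolation, that round trip looks roughly like the sketch below, which sticks to the calls already shown in the test.
// Standalone sketch of the TezCounters serde round trip used by the delegate above.
// Relies on the Writable-style write(DataOutput)/readFields(DataInput) contract shown in the test.
TezCounters original = new TezCounters();
original.findCounter("Global", "Global").increment(1);

ByteArrayOutputStream bos = new ByteArrayOutputStream();
original.write(new DataOutputStream(bos)); // serialize
byte[] payload = bos.toByteArray();

DataInputByteBuffer in = new DataInputByteBuffer();
in.reset(ByteBuffer.wrap(payload)); // wrap the bytes for reading
TezCounters copy = new TezCounters();
copy.readFields(in); // deserialize into a fresh instance
Assert.assertEquals(1, copy.findCounter("Global", "Global").getValue());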
Use of org.apache.hadoop.io.DataInputByteBuffer in project tez by apache.
From the class TestMockDAGAppMaster, method testBasicStatistics.
@Test(timeout = 10000)
public void testBasicStatistics() throws Exception {
  TezConfiguration tezconf = new TezConfiguration(defaultConf);
  MockTezClient tezClient =
      new MockTezClient("testMockAM", tezconf, true, null, null, null, null, false, false);
  tezClient.start();
  final String vAName = "A";
  final String vBName = "B";
  final String sourceName = "In";
  final String sinkName = "Out";
  DAG dag = DAG.create("testBasisStatistics");
  Vertex vA = Vertex.create(vAName, ProcessorDescriptor.create("Proc.class"), 3);
  Vertex vB = Vertex.create(vBName, ProcessorDescriptor.create("Proc.class"), 2);
  vA.addDataSource(sourceName, DataSourceDescriptor.create(InputDescriptor.create("In"), null, null));
  vB.addDataSink(sinkName, DataSinkDescriptor.create(OutputDescriptor.create("Out"), null, null));
  dag.addVertex(vA).addVertex(vB).addEdge(
      Edge.create(vA, vB,
          EdgeProperty.create(DataMovementType.SCATTER_GATHER, DataSourceType.PERSISTED,
              SchedulingType.SEQUENTIAL, OutputDescriptor.create("Out"), InputDescriptor.create("In"))));
  IOStatistics ioStats = new IOStatistics();
  ioStats.setDataSize(1);
  ioStats.setItemsProcessed(1);
  TaskStatistics vAStats = new TaskStatistics();
  vAStats.addIO(vBName, ioStats);
  vAStats.addIO(sourceName, ioStats);
  TaskStatistics vBStats = new TaskStatistics();
  vBStats.addIO(vAName, ioStats);
  vBStats.addIO(sinkName, ioStats);
  ByteArrayOutputStream bosA = new ByteArrayOutputStream();
  DataOutput outA = new DataOutputStream(bosA);
  vAStats.write(outA);
  final byte[] payloadA = bosA.toByteArray();
  ByteArrayOutputStream bosB = new ByteArrayOutputStream();
  DataOutput outB = new DataOutputStream(bosB);
  vBStats.write(outB);
  final byte[] payloadB = bosB.toByteArray();
  MockDAGAppMaster mockApp = tezClient.getLocalClient().getMockApp();
  MockContainerLauncher mockLauncher = mockApp.getContainerLauncher();
  mockLauncher.startScheduling(false);
  mockApp.statsDelegate = new StatisticsDelegate() {
    @Override
    public TaskStatistics getStatistics(TaskSpec taskSpec) {
      byte[] payload = payloadA;
      TaskStatistics stats = new TaskStatistics();
      if (taskSpec.getVertexName().equals(vBName)) {
        payload = payloadB;
      }
      final DataInputByteBuffer in = new DataInputByteBuffer();
      in.reset(ByteBuffer.wrap(payload));
      try {
        // this ensures that the serde code path is covered.
        stats.readFields(in);
      } catch (IOException e) {
        Assert.fail(e.getMessage());
      }
      return stats;
    }
  };
  mockApp.doSleep = false;
  DAGClient dagClient = tezClient.submitDAG(dag);
  mockLauncher.waitTillContainersLaunched();
  DAGImpl dagImpl = (DAGImpl) mockApp.getContext().getCurrentDAG();
  mockLauncher.startScheduling(true);
  DAGStatus status = dagClient.waitForCompletion();
  Assert.assertEquals(DAGStatus.State.SUCCEEDED, status.getState());
  // verify that the values have been correctly aggregated
  for (org.apache.tez.dag.app.dag.Vertex v : dagImpl.getVertices().values()) {
    VertexStatistics vStats = v.getStatistics();
    if (v.getName().equals(vAName)) {
      Assert.assertEquals(3, vStats.getOutputStatistics(vBName).getDataSize());
      Assert.assertEquals(3, vStats.getInputStatistics(sourceName).getDataSize());
      Assert.assertEquals(3, vStats.getOutputStatistics(vBName).getItemsProcessed());
      Assert.assertEquals(3, vStats.getInputStatistics(sourceName).getItemsProcessed());
    } else {
      Assert.assertEquals(2, vStats.getInputStatistics(vAName).getDataSize());
      Assert.assertEquals(2, vStats.getOutputStatistics(sinkName).getDataSize());
      Assert.assertEquals(2, vStats.getInputStatistics(vAName).getItemsProcessed());
      Assert.assertEquals(2, vStats.getOutputStatistics(sinkName).getItemsProcessed());
    }
  }
  tezClient.stop();
}
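The expected values follow from the aggregation: every task reports an IOStatistics with dataSize and itemsProcessed of 1, so vertex A (3 tasks) aggregates to 3 and vertex B (2 tasks) to 2. The per-task payload round trip performed by the delegate looks roughly like this in isolation; the sketch is restricted to calls already used in the test.
// Sketch: serialize per-task statistics once, then replay them through DataInputByteBuffer
// for each task attempt, as the StatisticsDelegate above does.
IOStatistics io = new IOStatistics();
io.setDataSize(1);
io.setItemsProcessed(1);
TaskStatistics taskStats = new TaskStatistics();
taskStats.addIO("B", io); // output edge to vertex B
taskStats.addIO("In", io); // data source named "In"

ByteArrayOutputStream bos = new ByteArrayOutputStream();
taskStats.write(new DataOutputStream(bos)); // serialize once
final byte[] payload = bos.toByteArray();

TaskStatistics replayed = new TaskStatistics();
DataInputByteBuffer in = new DataInputByteBuffer();
in.reset(ByteBuffer.wrap(payload)); // fresh wrap for each read
replayed.readFields(in); // deserialize for a task attempt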
Use of org.apache.hadoop.io.DataInputByteBuffer in project hadoop by apache.
From the class RMAppManager, method parseCredentials.
protected Credentials parseCredentials(ApplicationSubmissionContext application) throws IOException {
  Credentials credentials = new Credentials();
  DataInputByteBuffer dibb = new DataInputByteBuffer();
  ByteBuffer tokens = application.getAMContainerSpec().getTokens();
  if (tokens != null) {
    dibb.reset(tokens);
    credentials.readTokenStorageStream(dibb);
    // rewind so later readers of the shared buffer start from the beginning
    tokens.rewind();
  }
  return credentials;
}
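The tokens buffer read here is produced on the client side with the mirror-image calls. A hedged sketch of that producer follows; this is the standard YARN pattern, and the variable names (amContainerSpec in particular) are ours.
// Hypothetical client-side counterpart: serialize Credentials into the tokens ByteBuffer
// of the AM ContainerLaunchContext that parseCredentials later reads.
// Requires org.apache.hadoop.io.DataOutputBuffer, org.apache.hadoop.security.Credentials
// and org.apache.hadoop.yarn.api.records.ContainerLaunchContext.
Credentials credentials = new Credentials();
// ... add delegation tokens to credentials ...
DataOutputBuffer dob = new DataOutputBuffer();
credentials.writeTokenStorageToStream(dob); // counterpart of readTokenStorageStream
ByteBuffer tokens = ByteBuffer.wrap(dob.getData(), 0, dob.getLength());
amContainerSpec.setTokens(tokens); // ContainerLaunchContext.setTokens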