Use of java.io.DataInputStream in project hadoop by apache.
The class TestJoinTupleWritable, method testSparseWideWritable.
/**
 * Tests a tuple writable with more than 64 values, where the positions
 * marked as written are spread far apart.
 */
@Test
public void testSparseWideWritable() throws Exception {
  Writable[] manyWrits = makeRandomWritables(131);
  TupleWritable sTuple = new TupleWritable(manyWrits);
  for (int i = 0; i < manyWrits.length; i++) {
    if (i % 65 == 0) {
      sTuple.setWritten(i);
    }
  }
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  sTuple.write(new DataOutputStream(out));
  ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
  TupleWritable dTuple = new TupleWritable();
  dTuple.readFields(new DataInputStream(in));
  assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
  assertEquals("All tuple data has not been read from the stream", -1, in.read());
}
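The same write-then-readFields round trip recurs throughout the tests on this page. As a minimal, self-contained sketch of that pattern, the helper below serializes any Writable to an in-memory buffer and decodes it back through a DataInputStream; the WritableRoundTrip class and its roundTrip method are hypothetical names introduced here for illustration, not part of Hadoop.

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.io.Writable;

public final class WritableRoundTrip {
  // Serializes a Writable to an in-memory byte array, then populates a
  // fresh instance from it via DataInputStream, mirroring the tests above.
  public static <T extends Writable> T roundTrip(T source, T target) throws IOException {
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    try (DataOutputStream out = new DataOutputStream(bytes)) {
      source.write(out);
    }
    try (DataInputStream in = new DataInputStream(new ByteArrayInputStream(bytes.toByteArray()))) {
      target.readFields(in);
    }
    return target;
  }
}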
Use of java.io.DataInputStream in project hadoop by apache.
The class TestJoinTupleWritable, method testWideWritable2.
@Test
public void testWideWritable2() throws Exception {
  Writable[] manyWrits = makeRandomWritables(71);
  TupleWritable sTuple = new TupleWritable(manyWrits);
  for (int i = 0; i < manyWrits.length; i++) {
    sTuple.setWritten(i);
  }
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  sTuple.write(new DataOutputStream(out));
  ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
  TupleWritable dTuple = new TupleWritable();
  dTuple.readFields(new DataInputStream(in));
  assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
  assertEquals("All tuple data has not been read from the stream", -1, in.read());
}
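The widths in these two tests (131 and 71 values) appear chosen to cross the 64-value boundary: once a tuple has more than 64 positions, the record of which fields were written no longer fits in a single long, so the round trip exercises the wider bitset encoding both when the written positions are sparse and when every position is set.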
Use of java.io.DataInputStream in project hadoop by apache.
The class TestJoinTupleWritable, method testWritable.
@Test
public void testWritable() throws Exception {
  Random r = new Random();
  Writable[] writs = {
    new BooleanWritable(r.nextBoolean()),
    new FloatWritable(r.nextFloat()),
    new FloatWritable(r.nextFloat()),
    new IntWritable(r.nextInt()),
    new LongWritable(r.nextLong()),
    new BytesWritable("dingo".getBytes()),
    new LongWritable(r.nextLong()),
    new IntWritable(r.nextInt()),
    new BytesWritable("yak".getBytes()),
    new IntWritable(r.nextInt())
  };
  TupleWritable sTuple = makeTuple(writs);
  ByteArrayOutputStream out = new ByteArrayOutputStream();
  sTuple.write(new DataOutputStream(out));
  ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
  TupleWritable dTuple = new TupleWritable();
  dTuple.readFields(new DataInputStream(in));
  assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
}
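Using the hypothetical roundTrip helper sketched earlier, the same write/read check collapses to a few lines for any single Writable:

IntWritable original = new IntWritable(42);
IntWritable copy = WritableRoundTrip.roundTrip(original, new IntWritable());
assertEquals("Failed to write/read value", original, copy);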
Use of java.io.DataInputStream in project hadoop by apache.
The class TestShuffleHandler, method testKeepAlive.
@Test(timeout = 10000)
public void testKeepAlive() throws Exception {
  final ArrayList<Throwable> failures = new ArrayList<Throwable>(1);
  Configuration conf = new Configuration();
  conf.setInt(ShuffleHandler.SHUFFLE_PORT_CONFIG_KEY, 0);
  conf.setBoolean(ShuffleHandler.SHUFFLE_CONNECTION_KEEP_ALIVE_ENABLED, true);
  // try setting a negative keep-alive timeout
  conf.setInt(ShuffleHandler.SHUFFLE_CONNECTION_KEEP_ALIVE_TIME_OUT, -100);
  ShuffleHandler shuffleHandler = new ShuffleHandler() {

    @Override
    protected Shuffle getShuffle(final Configuration conf) {
      // replace the shuffle handler with one stubbed for testing
      return new Shuffle(conf) {

        @Override
        protected MapOutputInfo getMapOutputInfo(String mapId, int reduce,
            String jobId, String user) throws IOException {
          return null;
        }

        @Override
        protected void verifyRequest(String appid, ChannelHandlerContext ctx,
            HttpRequest request, HttpResponse response, URL requestUri)
            throws IOException {
        }

        @Override
        protected void populateHeaders(List<String> mapIds, String jobId,
            String user, int reduce, HttpRequest request, HttpResponse response,
            boolean keepAliveParam, Map<String, MapOutputInfo> infoMap)
            throws IOException {
          // Send some dummy data (populate content length details)
          ShuffleHeader header = new ShuffleHeader("attempt_12345_1_m_1_0", 5678, 5678, 1);
          DataOutputBuffer dob = new DataOutputBuffer();
          header.write(dob);
          dob = new DataOutputBuffer();
          for (int i = 0; i < 100000; ++i) {
            header.write(dob);
          }
          long contentLength = dob.getLength();
          // disable connectionKeepAliveEnabled if keepAliveParam is available
          if (keepAliveParam) {
            connectionKeepAliveeEnabled = false;
          }
          super.setResponseHeaders(response, keepAliveParam, contentLength);
        }

        @Override
        protected ChannelFuture sendMapOutput(ChannelHandlerContext ctx, Channel ch,
            String user, String mapId, int reduce, MapOutputInfo info)
            throws IOException {
          HttpResponse response = new DefaultHttpResponse(HTTP_1_1, OK);
          // send a shuffle header and a lot of data down the channel
          // to trigger a broken pipe
          ShuffleHeader header = new ShuffleHeader("attempt_12345_1_m_1_0", 5678, 5678, 1);
          DataOutputBuffer dob = new DataOutputBuffer();
          header.write(dob);
          ch.write(wrappedBuffer(dob.getData(), 0, dob.getLength()));
          dob = new DataOutputBuffer();
          for (int i = 0; i < 100000; ++i) {
            header.write(dob);
          }
          return ch.write(wrappedBuffer(dob.getData(), 0, dob.getLength()));
        }

        @Override
        protected void sendError(ChannelHandlerContext ctx, HttpResponseStatus status) {
          if (failures.size() == 0) {
            failures.add(new Error());
            ctx.getChannel().close();
          }
        }

        @Override
        protected void sendError(ChannelHandlerContext ctx, String message,
            HttpResponseStatus status) {
          if (failures.size() == 0) {
            failures.add(new Error());
            ctx.getChannel().close();
          }
        }
      };
    }
  };
  shuffleHandler.init(conf);
  shuffleHandler.start();
  String shuffleBaseURL = "http://127.0.0.1:"
      + shuffleHandler.getConfig().get(ShuffleHandler.SHUFFLE_PORT_CONFIG_KEY);
  URL url = new URL(shuffleBaseURL + "/mapOutput?job=job_12345_1&reduce=1&"
      + "map=attempt_12345_1_m_1_0");
  HttpURLConnection conn = (HttpURLConnection) url.openConnection();
  conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_NAME,
      ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
  conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_VERSION,
      ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
  conn.connect();
  DataInputStream input = new DataInputStream(conn.getInputStream());
  Assert.assertEquals(HttpHeader.KEEP_ALIVE.asString(),
      conn.getHeaderField(HttpHeader.CONNECTION.asString()));
  Assert.assertEquals("timeout=1",
      conn.getHeaderField(HttpHeader.KEEP_ALIVE.asString()));
  Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
  ShuffleHeader header = new ShuffleHeader();
  header.readFields(input);
  input.close();
  // For keepAlive via URL
  url = new URL(shuffleBaseURL + "/mapOutput?job=job_12345_1&reduce=1&"
      + "map=attempt_12345_1_m_1_0&keepAlive=true");
  conn = (HttpURLConnection) url.openConnection();
  conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_NAME,
      ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
  conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_VERSION,
      ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
  conn.connect();
  input = new DataInputStream(conn.getInputStream());
  Assert.assertEquals(HttpHeader.KEEP_ALIVE.asString(),
      conn.getHeaderField(HttpHeader.CONNECTION.asString()));
  Assert.assertEquals("timeout=1",
      conn.getHeaderField(HttpHeader.KEEP_ALIVE.asString()));
  Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
  header = new ShuffleHeader();
  header.readFields(input);
  input.close();
}
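The client side of this test follows a reusable pattern: open the URL, verify the keep-alive response headers, then wrap the body in a DataInputStream for decoding. Below is a small sketch of that pattern using only standard JDK APIs; the KeepAliveProbe class and openKeptAlive method are hypothetical names, and a real caller would still need to send the shuffle request headers shown above.

import java.io.DataInputStream;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;

public final class KeepAliveProbe {
  // Opens the URL, verifies that the server advertised a persistent
  // connection, and returns the response body as a DataInputStream so the
  // caller can decode Writable-encoded records such as ShuffleHeader.
  public static DataInputStream openKeptAlive(URL url) throws IOException {
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.connect();
    if (!"keep-alive".equalsIgnoreCase(conn.getHeaderField("Connection"))) {
      throw new IOException("server did not advertise a keep-alive connection");
    }
    return new DataInputStream(conn.getInputStream());
  }
}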
Use of java.io.DataInputStream in project hadoop by apache.
The class TestShuffleHandler, method testGetMapOutputInfo.
@Test(timeout = 100000)
public void testGetMapOutputInfo() throws Exception {
  final ArrayList<Throwable> failures = new ArrayList<Throwable>(1);
  Configuration conf = new Configuration();
  conf.setInt(ShuffleHandler.SHUFFLE_PORT_CONFIG_KEY, 0);
  conf.setInt(ShuffleHandler.MAX_SHUFFLE_CONNECTIONS, 3);
  conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "simple");
  UserGroupInformation.setConfiguration(conf);
  File absLogDir = new File("target",
      TestShuffleHandler.class.getSimpleName() + "LocDir").getAbsoluteFile();
  conf.set(YarnConfiguration.NM_LOCAL_DIRS, absLogDir.getAbsolutePath());
  ApplicationId appId = ApplicationId.newInstance(12345, 1);
  String appAttemptId = "attempt_12345_1_m_1_0";
  String user = "randomUser";
  String reducerId = "0";
  List<File> fileMap = new ArrayList<File>();
  createShuffleHandlerFiles(absLogDir, user, appId.toString(), appAttemptId, conf, fileMap);
  ShuffleHandler shuffleHandler = new ShuffleHandler() {

    @Override
    protected Shuffle getShuffle(Configuration conf) {
      // replace the shuffle handler with one stubbed for testing
      return new Shuffle(conf) {

        @Override
        protected void populateHeaders(List<String> mapIds, String outputBaseStr,
            String user, int reduce, HttpRequest request, HttpResponse response,
            boolean keepAliveParam, Map<String, MapOutputInfo> infoMap)
            throws IOException {
          // Only set response headers and skip everything else
          // send some dummy value for content-length
          super.setResponseHeaders(response, keepAliveParam, 100);
        }

        @Override
        protected void verifyRequest(String appid, ChannelHandlerContext ctx,
            HttpRequest request, HttpResponse response, URL requestUri)
            throws IOException {
          // Do nothing.
        }

        @Override
        protected void sendError(ChannelHandlerContext ctx, String message,
            HttpResponseStatus status) {
          if (failures.size() == 0) {
            failures.add(new Error(message));
            ctx.getChannel().close();
          }
        }

        @Override
        protected ChannelFuture sendMapOutput(ChannelHandlerContext ctx, Channel ch,
            String user, String mapId, int reduce, MapOutputInfo info)
            throws IOException {
          // send a shuffle header
          ShuffleHeader header = new ShuffleHeader("attempt_12345_1_m_1_0", 5678, 5678, 1);
          DataOutputBuffer dob = new DataOutputBuffer();
          header.write(dob);
          return ch.write(wrappedBuffer(dob.getData(), 0, dob.getLength()));
        }
      };
    }
  };
  shuffleHandler.init(conf);
  try {
    shuffleHandler.start();
    DataOutputBuffer outputBuffer = new DataOutputBuffer();
    outputBuffer.reset();
    Token<JobTokenIdentifier> jt = new Token<JobTokenIdentifier>(
        "identifier".getBytes(), "password".getBytes(),
        new Text(user), new Text("shuffleService"));
    jt.write(outputBuffer);
    shuffleHandler.initializeApplication(new ApplicationInitializationContext(
        user, appId,
        ByteBuffer.wrap(outputBuffer.getData(), 0, outputBuffer.getLength())));
    URL url = new URL("http://127.0.0.1:"
        + shuffleHandler.getConfig().get(ShuffleHandler.SHUFFLE_PORT_CONFIG_KEY)
        + "/mapOutput?job=job_12345_0001&reduce=" + reducerId
        + "&map=attempt_12345_1_m_1_0");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_NAME,
        ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
    conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_VERSION,
        ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
    conn.connect();
    try {
      DataInputStream is = new DataInputStream(conn.getInputStream());
      ShuffleHeader header = new ShuffleHeader();
      header.readFields(is);
      is.close();
    } catch (EOFException e) {
      // ignore
    }
    Assert.assertEquals("sendError called due to shuffle error", 0, failures.size());
  } finally {
    shuffleHandler.stop();
    FileUtil.fullyDelete(absLogDir);
  }
}
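The token hand-off above relies on the same Writable contract as everything else on this page: the Token is written to a DataOutputBuffer and later decoded through a DataInput. Below is a minimal, standalone sketch of that round trip; the TokenRoundTrip class is a hypothetical name, and Hadoop's DataInputBuffer stands in for DataInputStream here since both implement DataInput.

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;

public final class TokenRoundTrip {
  public static void main(String[] args) throws IOException {
    // Serialize a token the same way the test feeds one to the handler.
    Token<TokenIdentifier> token = new Token<TokenIdentifier>(
        "identifier".getBytes(StandardCharsets.UTF_8),
        "password".getBytes(StandardCharsets.UTF_8),
        new Text("kind"), new Text("shuffleService"));
    DataOutputBuffer out = new DataOutputBuffer();
    token.write(out);

    // Decode it back through the DataInput interface.
    DataInputBuffer in = new DataInputBuffer();
    in.reset(out.getData(), 0, out.getLength());
    Token<TokenIdentifier> copy = new Token<TokenIdentifier>();
    copy.readFields(in);
  }
}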