
Example 1 with ChannelHandlerContext

Use of org.jboss.netty.channel.ChannelHandlerContext in project hadoop by apache.

From the class TestShuffleHandler, method testKeepAlive.

@Test(timeout = 10000)
public void testKeepAlive() throws Exception {
    final ArrayList<Throwable> failures = new ArrayList<Throwable>(1);
    Configuration conf = new Configuration();
    conf.setInt(ShuffleHandler.SHUFFLE_PORT_CONFIG_KEY, 0);
    conf.setBoolean(ShuffleHandler.SHUFFLE_CONNECTION_KEEP_ALIVE_ENABLED, true);
    // try setting a negative keep-alive timeout.
    conf.setInt(ShuffleHandler.SHUFFLE_CONNECTION_KEEP_ALIVE_TIME_OUT, -100);
    ShuffleHandler shuffleHandler = new ShuffleHandler() {

        @Override
        protected Shuffle getShuffle(final Configuration conf) {
            // replace the shuffle handler with one stubbed for testing
            return new Shuffle(conf) {

                @Override
                protected MapOutputInfo getMapOutputInfo(String mapId, int reduce, String jobId, String user) throws IOException {
                    return null;
                }

                @Override
                protected void verifyRequest(String appid, ChannelHandlerContext ctx, HttpRequest request, HttpResponse response, URL requestUri) throws IOException {
                }

                @Override
                protected void populateHeaders(List<String> mapIds, String jobId, String user, int reduce, HttpRequest request, HttpResponse response, boolean keepAliveParam, Map<String, MapOutputInfo> infoMap) throws IOException {
                    // Send some dummy data (populate content length details)
                    ShuffleHeader header = new ShuffleHeader("attempt_12345_1_m_1_0", 5678, 5678, 1);
                    DataOutputBuffer dob = new DataOutputBuffer();
                    header.write(dob);
                    dob = new DataOutputBuffer();
                    for (int i = 0; i < 100000; ++i) {
                        header.write(dob);
                    }
                    long contentLength = dob.getLength();
                    // disable connectionKeepAliveEnabled if keepAliveParam is available
                    if (keepAliveParam) {
                        connectionKeepAliveEnabled = false;
                    }
                    super.setResponseHeaders(response, keepAliveParam, contentLength);
                }

                @Override
                protected ChannelFuture sendMapOutput(ChannelHandlerContext ctx, Channel ch, String user, String mapId, int reduce, MapOutputInfo info) throws IOException {
                    HttpResponse response = new DefaultHttpResponse(HTTP_1_1, OK);
                    // send a shuffle header and a lot of data down the channel
                    // to trigger a broken pipe
                    ShuffleHeader header = new ShuffleHeader("attempt_12345_1_m_1_0", 5678, 5678, 1);
                    DataOutputBuffer dob = new DataOutputBuffer();
                    header.write(dob);
                    ch.write(wrappedBuffer(dob.getData(), 0, dob.getLength()));
                    dob = new DataOutputBuffer();
                    for (int i = 0; i < 100000; ++i) {
                        header.write(dob);
                    }
                    return ch.write(wrappedBuffer(dob.getData(), 0, dob.getLength()));
                }

                @Override
                protected void sendError(ChannelHandlerContext ctx, HttpResponseStatus status) {
                    if (failures.size() == 0) {
                        failures.add(new Error());
                        ctx.getChannel().close();
                    }
                }

                @Override
                protected void sendError(ChannelHandlerContext ctx, String message, HttpResponseStatus status) {
                    if (failures.size() == 0) {
                        failures.add(new Error());
                        ctx.getChannel().close();
                    }
                }
            };
        }
    };
    shuffleHandler.init(conf);
    shuffleHandler.start();
    String shuffleBaseURL = "http://127.0.0.1:" + shuffleHandler.getConfig().get(ShuffleHandler.SHUFFLE_PORT_CONFIG_KEY);
    URL url = new URL(shuffleBaseURL + "/mapOutput?job=job_12345_1&reduce=1&" + "map=attempt_12345_1_m_1_0");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_NAME, ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
    conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_VERSION, ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
    conn.connect();
    DataInputStream input = new DataInputStream(conn.getInputStream());
    Assert.assertEquals(HttpHeader.KEEP_ALIVE.asString(), conn.getHeaderField(HttpHeader.CONNECTION.asString()));
    Assert.assertEquals("timeout=1", conn.getHeaderField(HttpHeader.KEEP_ALIVE.asString()));
    Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
    ShuffleHeader header = new ShuffleHeader();
    header.readFields(input);
    input.close();
    // For keepAlive via URL
    url = new URL(shuffleBaseURL + "/mapOutput?job=job_12345_1&reduce=1&" + "map=attempt_12345_1_m_1_0&keepAlive=true");
    conn = (HttpURLConnection) url.openConnection();
    conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_NAME, ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
    conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_VERSION, ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
    conn.connect();
    input = new DataInputStream(conn.getInputStream());
    Assert.assertEquals(HttpHeader.KEEP_ALIVE.asString(), conn.getHeaderField(HttpHeader.CONNECTION.asString()));
    Assert.assertEquals("timeout=1", conn.getHeaderField(HttpHeader.KEEP_ALIVE.asString()));
    Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
    header = new ShuffleHeader();
    header.readFields(input);
    input.close();
}
Also used: java.io.DataInputStream, java.net.HttpURLConnection, java.net.URL, java.util.ArrayList, java.util.List, java.util.Map, org.apache.hadoop.conf.Configuration, org.apache.hadoop.io.DataOutputBuffer, org.apache.hadoop.mapreduce.task.reduce.ShuffleHeader, org.apache.hadoop.yarn.conf.YarnConfiguration, org.jboss.netty.channel.AbstractChannel, org.jboss.netty.channel.Channel, org.jboss.netty.channel.ChannelHandlerContext, org.jboss.netty.channel.socket.SocketChannel, org.jboss.netty.handler.codec.http.DefaultHttpResponse, org.jboss.netty.handler.codec.http.HttpRequest, org.jboss.netty.handler.codec.http.HttpResponse, org.jboss.netty.handler.codec.http.HttpResponseStatus, org.junit.Test
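Note on the keep-alive behavior exercised above: the configuration deliberately sets a negative keep-alive timeout (-100), yet the assertions expect Connection: keep-alive with Keep-Alive: timeout=1, i.e. the handler clamps invalid values to a one-second default. A minimal sketch of that header logic using plain Netty 3.x APIs (an illustration, not Hadoop's actual setResponseHeaders):

import org.jboss.netty.handler.codec.http.HttpHeaders;
import org.jboss.netty.handler.codec.http.HttpRequest;
import org.jboss.netty.handler.codec.http.HttpResponse;

final class KeepAliveHeadersSketch {

    // timeoutSeconds plays the role of SHUFFLE_CONNECTION_KEEP_ALIVE_TIME_OUT;
    // the test sets it to -100 and asserts "timeout=1" on the wire.
    static void setKeepAliveHeaders(HttpRequest request, HttpResponse response,
                                    int timeoutSeconds) {
        // Clamp non-positive timeouts to one second, matching the assertions above.
        int timeout = timeoutSeconds > 0 ? timeoutSeconds : 1;
        if (HttpHeaders.isKeepAlive(request)) {
            HttpHeaders.setHeader(response, HttpHeaders.Names.CONNECTION,
                    HttpHeaders.Values.KEEP_ALIVE);
            // The Keep-Alive header name is written as a literal here.
            HttpHeaders.setHeader(response, "Keep-Alive", "timeout=" + timeout);
        } else {
            HttpHeaders.setHeader(response, HttpHeaders.Names.CONNECTION,
                    HttpHeaders.Values.CLOSE);
        }
    }
}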

Example 2 with ChannelHandlerContext

Use of org.jboss.netty.channel.ChannelHandlerContext in project hadoop by apache.

From the class TestShuffleHandler, method testGetMapOutputInfo.

@Test(timeout = 100000)
public void testGetMapOutputInfo() throws Exception {
    final ArrayList<Throwable> failures = new ArrayList<Throwable>(1);
    Configuration conf = new Configuration();
    conf.setInt(ShuffleHandler.SHUFFLE_PORT_CONFIG_KEY, 0);
    conf.setInt(ShuffleHandler.MAX_SHUFFLE_CONNECTIONS, 3);
    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "simple");
    UserGroupInformation.setConfiguration(conf);
    File absLogDir = new File("target", TestShuffleHandler.class.getSimpleName() + "LocDir").getAbsoluteFile();
    conf.set(YarnConfiguration.NM_LOCAL_DIRS, absLogDir.getAbsolutePath());
    ApplicationId appId = ApplicationId.newInstance(12345, 1);
    String appAttemptId = "attempt_12345_1_m_1_0";
    String user = "randomUser";
    String reducerId = "0";
    List<File> fileMap = new ArrayList<File>();
    createShuffleHandlerFiles(absLogDir, user, appId.toString(), appAttemptId, conf, fileMap);
    ShuffleHandler shuffleHandler = new ShuffleHandler() {

        @Override
        protected Shuffle getShuffle(Configuration conf) {
            // replace the shuffle handler with one stubbed for testing
            return new Shuffle(conf) {

                @Override
                protected void populateHeaders(List<String> mapIds, String outputBaseStr, String user, int reduce, HttpRequest request, HttpResponse response, boolean keepAliveParam, Map<String, MapOutputInfo> infoMap) throws IOException {
                    // Only set response headers and skip everything else
                    // send some dummy value for content-length
                    super.setResponseHeaders(response, keepAliveParam, 100);
                }

                @Override
                protected void verifyRequest(String appid, ChannelHandlerContext ctx, HttpRequest request, HttpResponse response, URL requestUri) throws IOException {
                // Do nothing.
                }

                @Override
                protected void sendError(ChannelHandlerContext ctx, String message, HttpResponseStatus status) {
                    if (failures.size() == 0) {
                        failures.add(new Error(message));
                        ctx.getChannel().close();
                    }
                }

                @Override
                protected ChannelFuture sendMapOutput(ChannelHandlerContext ctx, Channel ch, String user, String mapId, int reduce, MapOutputInfo info) throws IOException {
                    // send a shuffle header
                    ShuffleHeader header = new ShuffleHeader("attempt_12345_1_m_1_0", 5678, 5678, 1);
                    DataOutputBuffer dob = new DataOutputBuffer();
                    header.write(dob);
                    return ch.write(wrappedBuffer(dob.getData(), 0, dob.getLength()));
                }
            };
        }
    };
    shuffleHandler.init(conf);
    try {
        shuffleHandler.start();
        DataOutputBuffer outputBuffer = new DataOutputBuffer();
        outputBuffer.reset();
        Token<JobTokenIdentifier> jt = new Token<JobTokenIdentifier>("identifier".getBytes(), "password".getBytes(), new Text(user), new Text("shuffleService"));
        jt.write(outputBuffer);
        shuffleHandler.initializeApplication(new ApplicationInitializationContext(user, appId, ByteBuffer.wrap(outputBuffer.getData(), 0, outputBuffer.getLength())));
        URL url = new URL("http://127.0.0.1:" + shuffleHandler.getConfig().get(ShuffleHandler.SHUFFLE_PORT_CONFIG_KEY) + "/mapOutput?job=job_12345_0001&reduce=" + reducerId + "&map=attempt_12345_1_m_1_0");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_NAME, ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
        conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_VERSION, ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
        conn.connect();
        try {
            DataInputStream is = new DataInputStream(conn.getInputStream());
            ShuffleHeader header = new ShuffleHeader();
            header.readFields(is);
            is.close();
        } catch (EOFException e) {
        // ignore
        }
        Assert.assertEquals("sendError called due to shuffle error", 0, failures.size());
    } finally {
        shuffleHandler.stop();
        FileUtil.fullyDelete(absLogDir);
    }
}
Also used: java.io.DataInputStream, java.io.EOFException, java.io.File, java.net.HttpURLConnection, java.net.URL, java.util.ArrayList, java.util.List, java.util.Map, org.apache.hadoop.conf.Configuration, org.apache.hadoop.io.DataOutputBuffer, org.apache.hadoop.io.Text, org.apache.hadoop.mapreduce.security.token.JobTokenIdentifier, org.apache.hadoop.mapreduce.task.reduce.ShuffleHeader, org.apache.hadoop.security.token.Token, org.apache.hadoop.yarn.api.records.ApplicationId, org.apache.hadoop.yarn.conf.YarnConfiguration, org.apache.hadoop.yarn.server.api.ApplicationInitializationContext, org.jboss.netty.channel.AbstractChannel, org.jboss.netty.channel.Channel, org.jboss.netty.channel.ChannelHandlerContext, org.jboss.netty.channel.socket.SocketChannel, org.jboss.netty.handler.codec.http.DefaultHttpResponse, org.jboss.netty.handler.codec.http.HttpRequest, org.jboss.netty.handler.codec.http.HttpResponse, org.jboss.netty.handler.codec.http.HttpResponseStatus, org.junit.Test
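The request setup (the /mapOutput query string plus the two mandatory shuffle headers) recurs in every example here. A small hypothetical helper, shown only to make the request protocol explicit (ShuffleTestClient is not a Hadoop class):

import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.URL;

import org.apache.hadoop.mapreduce.task.reduce.ShuffleHeader;

final class ShuffleTestClient {

    // Opens /mapOutput with the two headers every request in these tests carries.
    static HttpURLConnection openShuffleConnection(int port, String jobId,
            String reducerId, String mapId) throws IOException {
        URL url = new URL("http://127.0.0.1:" + port + "/mapOutput?job=" + jobId
                + "&reduce=" + reducerId + "&map=" + mapId);
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        // Shuffle name and version headers; the handler is expected to
        // reject requests where either is missing or wrong.
        conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_NAME,
                ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
        conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_VERSION,
                ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
        conn.connect();
        return conn;
    }
}

With it, the connection above would read openShuffleConnection(port, "job_12345_0001", "0", "attempt_12345_1_m_1_0").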

Example 3 with ChannelHandlerContext

Use of org.jboss.netty.channel.ChannelHandlerContext in project hadoop by apache.

From the class TestShuffleHandler, method testClientClosesConnection.

/**
   * Verify that the handler tolerates a client prematurely closing its
   * connection.
   *
   * @throws Exception if the test fails.
   */
@Test(timeout = 10000)
public void testClientClosesConnection() throws Exception {
    final ArrayList<Throwable> failures = new ArrayList<Throwable>(1);
    Configuration conf = new Configuration();
    conf.setInt(ShuffleHandler.SHUFFLE_PORT_CONFIG_KEY, 0);
    ShuffleHandler shuffleHandler = new ShuffleHandler() {

        @Override
        protected Shuffle getShuffle(Configuration conf) {
            // replace the shuffle handler with one stubbed for testing
            return new Shuffle(conf) {

                @Override
                protected MapOutputInfo getMapOutputInfo(String mapId, int reduce, String jobId, String user) throws IOException {
                    return null;
                }

                @Override
                protected void populateHeaders(List<String> mapIds, String jobId, String user, int reduce, HttpRequest request, HttpResponse response, boolean keepAliveParam, Map<String, MapOutputInfo> infoMap) throws IOException {
                    // Only set response headers and skip everything else
                    // send some dummy value for content-length
                    super.setResponseHeaders(response, keepAliveParam, 100);
                }

                @Override
                protected void verifyRequest(String appid, ChannelHandlerContext ctx, HttpRequest request, HttpResponse response, URL requestUri) throws IOException {
                }

                @Override
                protected ChannelFuture sendMapOutput(ChannelHandlerContext ctx, Channel ch, String user, String mapId, int reduce, MapOutputInfo info) throws IOException {
                    // send a shuffle header and a lot of data down the channel
                    // to trigger a broken pipe
                    ShuffleHeader header = new ShuffleHeader("attempt_12345_1_m_1_0", 5678, 5678, 1);
                    DataOutputBuffer dob = new DataOutputBuffer();
                    header.write(dob);
                    ch.write(wrappedBuffer(dob.getData(), 0, dob.getLength()));
                    dob = new DataOutputBuffer();
                    for (int i = 0; i < 100000; ++i) {
                        header.write(dob);
                    }
                    return ch.write(wrappedBuffer(dob.getData(), 0, dob.getLength()));
                }

                @Override
                protected void sendError(ChannelHandlerContext ctx, HttpResponseStatus status) {
                    if (failures.size() == 0) {
                        failures.add(new Error());
                        ctx.getChannel().close();
                    }
                }

                @Override
                protected void sendError(ChannelHandlerContext ctx, String message, HttpResponseStatus status) {
                    if (failures.size() == 0) {
                        failures.add(new Error());
                        ctx.getChannel().close();
                    }
                }
            };
        }
    };
    shuffleHandler.init(conf);
    shuffleHandler.start();
    // simulate a reducer that closes early by reading a single shuffle header
    // then closing the connection
    URL url = new URL("http://127.0.0.1:" + shuffleHandler.getConfig().get(ShuffleHandler.SHUFFLE_PORT_CONFIG_KEY) + "/mapOutput?job=job_12345_1&reduce=1&map=attempt_12345_1_m_1_0");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_NAME, ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
    conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_VERSION, ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
    conn.connect();
    DataInputStream input = new DataInputStream(conn.getInputStream());
    Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
    Assert.assertEquals("close", conn.getHeaderField(HttpHeader.CONNECTION.asString()));
    ShuffleHeader header = new ShuffleHeader();
    header.readFields(input);
    input.close();
    shuffleHandler.stop();
    Assert.assertTrue("sendError called when client closed connection", failures.size() == 0);
}
Also used: java.io.DataInputStream, java.net.HttpURLConnection, java.net.URL, java.util.ArrayList, java.util.List, java.util.Map, org.apache.hadoop.conf.Configuration, org.apache.hadoop.io.DataOutputBuffer, org.apache.hadoop.mapreduce.task.reduce.ShuffleHeader, org.apache.hadoop.yarn.conf.YarnConfiguration, org.jboss.netty.channel.AbstractChannel, org.jboss.netty.channel.Channel, org.jboss.netty.channel.ChannelHandlerContext, org.jboss.netty.channel.socket.SocketChannel, org.jboss.netty.handler.codec.http.DefaultHttpResponse, org.jboss.netty.handler.codec.http.HttpRequest, org.jboss.netty.handler.codec.http.HttpResponse, org.jboss.netty.handler.codec.http.HttpResponseStatus, org.junit.Test
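The point of this test is that a write failing because the reducer hung up must not be reported through sendError. One common Netty 3.x pattern for that is a write listener that closes the channel quietly on failure; a sketch under that assumption (not the actual ShuffleHandler code):

import org.jboss.netty.channel.ChannelFuture;
import org.jboss.netty.channel.ChannelFutureListener;

final class QuietCloseListener implements ChannelFutureListener {

    @Override
    public void operationComplete(ChannelFuture future) {
        if (!future.isSuccess()) {
            // The peer disconnected mid-transfer; close our side
            // instead of reporting a shuffle error.
            future.getChannel().close();
        }
    }
}

It would be attached as ch.write(wrappedBuffer(dob.getData(), 0, dob.getLength())).addListener(new QuietCloseListener()).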

Example 4 with ChannelHandlerContext

Use of org.jboss.netty.channel.ChannelHandlerContext in project hadoop by apache.

From the class TestShuffleHandler, method testMapFileAccess.

/**
   * Validate the ownership of the map-output files being pulled in. The
   * local-file-system owner of the file should match the user the
   * application was initialized with.
   *
   * @throws IOException if the shuffle request or the ownership check fails.
   */
@Test(timeout = 100000)
public void testMapFileAccess() throws IOException {
    // This will run only if NativeIO is enabled, as SecureIOUtils needs it
    assumeTrue(NativeIO.isAvailable());
    Configuration conf = new Configuration();
    conf.setInt(ShuffleHandler.SHUFFLE_PORT_CONFIG_KEY, 0);
    conf.setInt(ShuffleHandler.MAX_SHUFFLE_CONNECTIONS, 3);
    conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
    UserGroupInformation.setConfiguration(conf);
    File absLogDir = new File("target", TestShuffleHandler.class.getSimpleName() + "LocDir").getAbsoluteFile();
    conf.set(YarnConfiguration.NM_LOCAL_DIRS, absLogDir.getAbsolutePath());
    ApplicationId appId = ApplicationId.newInstance(12345, 1);
    LOG.info(appId.toString());
    String appAttemptId = "attempt_12345_1_m_1_0";
    String user = "randomUser";
    String reducerId = "0";
    List<File> fileMap = new ArrayList<File>();
    createShuffleHandlerFiles(absLogDir, user, appId.toString(), appAttemptId, conf, fileMap);
    ShuffleHandler shuffleHandler = new ShuffleHandler() {

        @Override
        protected Shuffle getShuffle(Configuration conf) {
            // replace the shuffle handler with one stubbed for testing
            return new Shuffle(conf) {

                @Override
                protected void verifyRequest(String appid, ChannelHandlerContext ctx, HttpRequest request, HttpResponse response, URL requestUri) throws IOException {
                // Do nothing.
                }
            };
        }
    };
    shuffleHandler.init(conf);
    try {
        shuffleHandler.start();
        DataOutputBuffer outputBuffer = new DataOutputBuffer();
        outputBuffer.reset();
        Token<JobTokenIdentifier> jt = new Token<JobTokenIdentifier>("identifier".getBytes(), "password".getBytes(), new Text(user), new Text("shuffleService"));
        jt.write(outputBuffer);
        shuffleHandler.initializeApplication(new ApplicationInitializationContext(user, appId, ByteBuffer.wrap(outputBuffer.getData(), 0, outputBuffer.getLength())));
        URL url = new URL("http://127.0.0.1:" + shuffleHandler.getConfig().get(ShuffleHandler.SHUFFLE_PORT_CONFIG_KEY) + "/mapOutput?job=job_12345_0001&reduce=" + reducerId + "&map=attempt_12345_1_m_1_0");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_NAME, ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
        conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_VERSION, ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
        conn.connect();
        byte[] byteArr = new byte[10000];
        try {
            DataInputStream is = new DataInputStream(conn.getInputStream());
            is.readFully(byteArr);
        } catch (EOFException e) {
        // ignore
        }
        // Retrieve file owner name
        FileInputStream is = new FileInputStream(fileMap.get(0));
        String owner = NativeIO.POSIX.getFstat(is.getFD()).getOwner();
        is.close();
        String message = "Owner '" + owner + "' for path " + fileMap.get(0).getAbsolutePath() + " did not match expected owner '" + user + "'";
        Assert.assertTrue((new String(byteArr)).contains(message));
    } finally {
        shuffleHandler.stop();
        FileUtil.fullyDelete(absLogDir);
    }
}
Also used: java.io.DataInputStream, java.io.EOFException, java.io.File, java.io.FileInputStream, java.net.HttpURLConnection, java.net.URL, java.util.ArrayList, org.apache.hadoop.conf.Configuration, org.apache.hadoop.io.DataOutputBuffer, org.apache.hadoop.io.Text, org.apache.hadoop.mapreduce.security.token.JobTokenIdentifier, org.apache.hadoop.security.token.Token, org.apache.hadoop.yarn.api.records.ApplicationId, org.apache.hadoop.yarn.conf.YarnConfiguration, org.apache.hadoop.yarn.server.api.ApplicationInitializationContext, org.jboss.netty.channel.ChannelHandlerContext, org.jboss.netty.handler.codec.http.DefaultHttpResponse, org.jboss.netty.handler.codec.http.HttpRequest, org.jboss.netty.handler.codec.http.HttpResponse, org.junit.Test
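The ownership check this test exercises boils down to comparing the fstat owner of the map-output file with the expected user; the test uses the same NativeIO call for its own verification. A hypothetical standalone version of that comparison (the real check lives in Hadoop's SecureIOUtils):

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;

import org.apache.hadoop.io.nativeio.NativeIO;

final class OwnerCheck {

    // Compares the local-file-system owner of a map-output file with the
    // expected user, mirroring the error message asserted in the test above.
    static void verifyOwner(File file, String expectedUser) throws IOException {
        FileInputStream in = new FileInputStream(file);
        try {
            String owner = NativeIO.POSIX.getFstat(in.getFD()).getOwner();
            if (!owner.equals(expectedUser)) {
                throw new IOException("Owner '" + owner + "' for path "
                        + file.getAbsolutePath()
                        + " did not match expected owner '" + expectedUser + "'");
            }
        } finally {
            in.close();
        }
    }
}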

Example 5 with ChannelHandlerContext

Use of org.jboss.netty.channel.ChannelHandlerContext in project MSEC by Tencent.

From the class ResponseDecoder, method decode.

@Override
protected Object decode(ChannelHandlerContext channelHandlerContext, Channel channel, Object msg) throws Exception {
    long receiveTime = System.currentTimeMillis();
    if (!(msg instanceof ChannelBuffer)) {
        return msg;
    }
    //System.out.println("Response recvtime: " + System.currentTimeMillis());
    List<Object> messages = null;
    ChannelBuffer cb = (ChannelBuffer) NettyCodecUtils.getAttachment(channelHandlerContext, Constants.ATTACHMENT_BYTEBUFFER);
    ChannelBuffer cb_ = (ChannelBuffer) msg;
    if (cb == null) {
        cb = cb_;
    } else {
        cb.writeBytes(cb_);
    }
    ChannelHandlerContext encodeCtx = channelHandlerContext.getPipeline().getContext("encoder");
    Integer protoFlag = (Integer) NettyCodecUtils.getAttachment(encodeCtx, Constants.ATTACHMENT_CUSTOM_PROTO_FLAG);
    if (protoFlag != null && protoFlag.intValue() == 1) {
        RpcResponse rpcResponse = getCustomPackage(channelHandlerContext, channel, cb, encodeCtx);
        if (rpcResponse == null) {
            return null;
        }
        if (messages == null) {
            messages = new ArrayList<Object>();
        }
        messages.add(rpcResponse);
        return messages;
    }
    int lastReadIndex = cb.readerIndex();
    while (cb.readable()) {
        if (cb.readableBytes() < Constants.PKG_LEAST_LENGTH) {
            setAttachment(channelHandlerContext, channel, cb, lastReadIndex);
            break;
        }
        byte stx = cb.readByte();
        if (stx != (byte) '(') {
            log.error("Invalid package stx.");
            channelHandlerContext.getChannel().close();
            throw new IllegalArgumentException("Response decode error: invalid package stx " + stx);
        }
        int headLength = cb.readInt();
        int bodyLength = cb.readInt();
        if (cb.readableBytes() < headLength + bodyLength + 1) {
            setAttachment(channelHandlerContext, channel, cb, lastReadIndex);
            break;
        }
        byte[] headBytes = new byte[headLength];
        byte[] bodyBytes = new byte[bodyLength];
        cb.readBytes(headBytes);
        cb.readBytes(bodyBytes);
        byte etx = cb.readByte();
        if (etx != ')') {
            log.error("Invalid package etx.");
            channelHandlerContext.getChannel().close();
            throw new IllegalArgumentException("Response decode error: invalid package etx " + etx);
        }
        RpcResponse rpcResponse = null;
        //parse protobuf
        try {
            Head.CRpcHead pbHead = Head.CRpcHead.parseFrom(headBytes);
            String serviceMethodName = pbHead.getMethodName().toStringUtf8();
            //String[] splits = serviceMethodName.split(":");
            //String serviceName = splits[0];
            //String methodName = splits[1];
            rpcResponse = new RpcResponse();
            rpcResponse.setSeq(pbHead.getSequence());
            rpcResponse.setErrno(pbHead.getErr());
            if (pbHead.getErrMsg() != null && !pbHead.getErrMsg().isEmpty()) {
                rpcResponse.setError(new Exception(pbHead.getErrMsg().toStringUtf8()));
            } else {
                rpcResponse.setError(null);
            }
            if (pbHead.getErr() == 0) {
                rpcResponse.setResultObj(bodyBytes);
            } else {
                rpcResponse.setResultObj(null);
            }
        } catch (com.google.protobuf.InvalidProtocolBufferException ex) {
            rpcResponse = null;
            log.error("RPC Response parse failed.");
            throw new IllegalArgumentException("Response decode error: protobuf parse failed.");
        }
        if (rpcResponse != null) {
            if (messages == null) {
                messages = new ArrayList<Object>();
            }
            messages.add(rpcResponse);
            lastReadIndex = cb.readerIndex();
        } else {
            setAttachment(channelHandlerContext, channel, cb, lastReadIndex);
            break;
        }
    }
    return messages;
}
Also used: org.jboss.netty.buffer.ChannelBuffer, org.jboss.netty.buffer.DynamicChannelBuffer, org.jboss.netty.channel.ChannelHandlerContext, org.msec.rpc.RpcResponse, srpc.Head
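The decoder above enforces a fixed frame layout: a one-byte stx '(', two big-endian ints giving head and body lengths, the head and body payloads, and a one-byte etx ')'. A hypothetical encoder for the same layout makes the wire format explicit (RequestFramer is illustrative, not part of MSEC):

import org.jboss.netty.buffer.ChannelBuffer;
import org.jboss.netty.buffer.ChannelBuffers;

final class RequestFramer {

    // Builds one frame: '(' [headLength][bodyLength] head body ')'.
    // Lengths are big-endian ints, ChannelBuffer's default for writeInt().
    static ChannelBuffer frame(byte[] head, byte[] body) {
        ChannelBuffer cb = ChannelBuffers.buffer(
                1 + 4 + 4 + head.length + body.length + 1);
        cb.writeByte((byte) '(');  // stx
        cb.writeInt(head.length);  // headLength
        cb.writeInt(body.length);  // bodyLength
        cb.writeBytes(head);       // serialized Head.CRpcHead
        cb.writeBytes(body);       // protobuf body (result payload)
        cb.writeByte((byte) ')');  // etx
        return cb;
    }
}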

Aggregations

ChannelHandlerContext (org.jboss.netty.channel.ChannelHandlerContext): 16 usages
Test (org.junit.Test): 8 usages
Channel (org.jboss.netty.channel.Channel): 7 usages
ArrayList (java.util.ArrayList): 6 usages
Configuration (org.apache.hadoop.conf.Configuration): 6 usages
YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration): 6 usages
HttpRequest (org.jboss.netty.handler.codec.http.HttpRequest): 6 usages
HttpURLConnection (java.net.HttpURLConnection): 5 usages
URL (java.net.URL): 5 usages
DataOutputBuffer (org.apache.hadoop.io.DataOutputBuffer): 5 usages
AbstractChannel (org.jboss.netty.channel.AbstractChannel): 5 usages
SocketChannel (org.jboss.netty.channel.socket.SocketChannel): 5 usages
DefaultHttpResponse (org.jboss.netty.handler.codec.http.DefaultHttpResponse): 5 usages
HttpResponse (org.jboss.netty.handler.codec.http.HttpResponse): 5 usages
DataInputStream (java.io.DataInputStream): 4 usages
List (java.util.List): 4 usages
Map (java.util.Map): 4 usages
ShuffleHeader (org.apache.hadoop.mapreduce.task.reduce.ShuffleHeader): 4 usages
EOFException (java.io.EOFException): 3 usages
MessageEvent (org.jboss.netty.channel.MessageEvent): 3 usages