
Example 6 with Channel

Use of org.jboss.netty.channel.Channel in project hadoop by apache.

The class TestShuffleHandler, method testClientClosesConnection:

/**
   * Verify client prematurely closing a connection.
   *
   * @throws Exception exception.
   */
@Test(timeout = 10000)
public void testClientClosesConnection() throws Exception {
    final ArrayList<Throwable> failures = new ArrayList<Throwable>(1);
    Configuration conf = new Configuration();
    conf.setInt(ShuffleHandler.SHUFFLE_PORT_CONFIG_KEY, 0);
    ShuffleHandler shuffleHandler = new ShuffleHandler() {

        @Override
        protected Shuffle getShuffle(Configuration conf) {
            // replace the shuffle handler with one stubbed for testing
            return new Shuffle(conf) {

                @Override
                protected MapOutputInfo getMapOutputInfo(String mapId, int reduce, String jobId, String user) throws IOException {
                    return null;
                }

                @Override
                protected void populateHeaders(List<String> mapIds, String jobId, String user, int reduce, HttpRequest request, HttpResponse response, boolean keepAliveParam, Map<String, MapOutputInfo> infoMap) throws IOException {
                    // Only set response headers and skip everything else
                    // send some dummy value for content-length
                    super.setResponseHeaders(response, keepAliveParam, 100);
                }

                @Override
                protected void verifyRequest(String appid, ChannelHandlerContext ctx, HttpRequest request, HttpResponse response, URL requestUri) throws IOException {
                }

                @Override
                protected ChannelFuture sendMapOutput(ChannelHandlerContext ctx, Channel ch, String user, String mapId, int reduce, MapOutputInfo info) throws IOException {
                    // send a shuffle header and a lot of data down the channel
                    // to trigger a broken pipe
                    ShuffleHeader header = new ShuffleHeader("attempt_12345_1_m_1_0", 5678, 5678, 1);
                    DataOutputBuffer dob = new DataOutputBuffer();
                    header.write(dob);
                    ch.write(wrappedBuffer(dob.getData(), 0, dob.getLength()));
                    dob = new DataOutputBuffer();
                    for (int i = 0; i < 100000; ++i) {
                        header.write(dob);
                    }
                    return ch.write(wrappedBuffer(dob.getData(), 0, dob.getLength()));
                }

                @Override
                protected void sendError(ChannelHandlerContext ctx, HttpResponseStatus status) {
                    if (failures.size() == 0) {
                        failures.add(new Error());
                        ctx.getChannel().close();
                    }
                }

                @Override
                protected void sendError(ChannelHandlerContext ctx, String message, HttpResponseStatus status) {
                    if (failures.size() == 0) {
                        failures.add(new Error());
                        ctx.getChannel().close();
                    }
                }
            };
        }
    };
    shuffleHandler.init(conf);
    shuffleHandler.start();
    // simulate a reducer that closes early by reading a single shuffle header
    // then closing the connection
    URL url = new URL("http://127.0.0.1:" + shuffleHandler.getConfig().get(ShuffleHandler.SHUFFLE_PORT_CONFIG_KEY) + "/mapOutput?job=job_12345_1&reduce=1&map=attempt_12345_1_m_1_0");
    HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_NAME, ShuffleHeader.DEFAULT_HTTP_HEADER_NAME);
    conn.setRequestProperty(ShuffleHeader.HTTP_HEADER_VERSION, ShuffleHeader.DEFAULT_HTTP_HEADER_VERSION);
    conn.connect();
    DataInputStream input = new DataInputStream(conn.getInputStream());
    Assert.assertEquals(HttpURLConnection.HTTP_OK, conn.getResponseCode());
    Assert.assertEquals("close", conn.getHeaderField(HttpHeader.CONNECTION.asString()));
    ShuffleHeader header = new ShuffleHeader();
    header.readFields(input);
    input.close();
    shuffleHandler.stop();
    Assert.assertTrue("sendError called when client closed connection", failures.size() == 0);
}
Also used : HttpRequest(org.jboss.netty.handler.codec.http.HttpRequest) Configuration(org.apache.hadoop.conf.Configuration) YarnConfiguration(org.apache.hadoop.yarn.conf.YarnConfiguration) HttpResponseStatus(org.jboss.netty.handler.codec.http.HttpResponseStatus) SocketChannel(org.jboss.netty.channel.socket.SocketChannel) Channel(org.jboss.netty.channel.Channel) AbstractChannel(org.jboss.netty.channel.AbstractChannel) ShuffleHeader(org.apache.hadoop.mapreduce.task.reduce.ShuffleHeader) ArrayList(java.util.ArrayList) DefaultHttpResponse(org.jboss.netty.handler.codec.http.DefaultHttpResponse) HttpResponse(org.jboss.netty.handler.codec.http.HttpResponse) ChannelHandlerContext(org.jboss.netty.channel.ChannelHandlerContext) DataInputStream(java.io.DataInputStream) URL(java.net.URL) HttpURLConnection(java.net.HttpURLConnection) DataOutputBuffer(org.apache.hadoop.io.DataOutputBuffer) List(java.util.List) ArrayList(java.util.ArrayList) Map(java.util.Map) Test(org.junit.Test)
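The overridden sendMapOutput above depends on a static import of org.jboss.netty.buffer.ChannelBuffers.wrappedBuffer from the enclosing test class. As a minimal, self-contained sketch (illustrative names, not Hadoop code), the same write pattern can also watch the returned ChannelFuture to observe the broken pipe the test provokes:

import static org.jboss.netty.buffer.ChannelBuffers.wrappedBuffer;

import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.ChannelFuture;
import org.jboss.netty.channel.ChannelFutureListener;

public class BrokenPipeSketch {

    // Write the bytes and log if the write fails, e.g. because the peer
    // closed the socket early (the condition the test above provokes).
    static ChannelFuture writeAndWatch(Channel ch, byte[] data) {
        ChannelFuture future = ch.write(wrappedBuffer(data, 0, data.length));
        future.addListener(new ChannelFutureListener() {

            @Override
            public void operationComplete(ChannelFuture f) {
                if (!f.isSuccess()) {
                    // Typically surfaces as a ClosedChannelException / broken pipe.
                    System.err.println("write failed: " + f.getCause());
                }
            }
        });
        return future;
    }
}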

Example 7 with Channel

Use of org.jboss.netty.channel.Channel in project hadoop by apache.

The class ShuffleHandler, method serviceStart:

// TODO change AbstractService to throw InterruptedException
@Override
protected void serviceStart() throws Exception {
    Configuration conf = getConfig();
    userRsrc = new ConcurrentHashMap<String, String>();
    secretManager = new JobTokenSecretManager();
    recoverState(conf);
    ServerBootstrap bootstrap = new ServerBootstrap(selector);
    try {
        pipelineFact = new HttpPipelineFactory(conf);
    } catch (Exception ex) {
        throw new RuntimeException(ex);
    }
    bootstrap.setOption("backlog", conf.getInt(SHUFFLE_LISTEN_QUEUE_SIZE, DEFAULT_SHUFFLE_LISTEN_QUEUE_SIZE));
    bootstrap.setOption("child.keepAlive", true);
    bootstrap.setPipelineFactory(pipelineFact);
    port = conf.getInt(SHUFFLE_PORT_CONFIG_KEY, DEFAULT_SHUFFLE_PORT);
    Channel ch = bootstrap.bind(new InetSocketAddress(port));
    accepted.add(ch);
    port = ((InetSocketAddress) ch.getLocalAddress()).getPort();
    conf.set(SHUFFLE_PORT_CONFIG_KEY, Integer.toString(port));
    pipelineFact.SHUFFLE.setPort(port);
    LOG.info(getName() + " listening on port " + port);
    super.serviceStart();
    sslFileBufferSize = conf.getInt(SUFFLE_SSL_FILE_BUFFER_SIZE_KEY, DEFAULT_SUFFLE_SSL_FILE_BUFFER_SIZE);
    connectionKeepAliveEnabled = conf.getBoolean(SHUFFLE_CONNECTION_KEEP_ALIVE_ENABLED, DEFAULT_SHUFFLE_CONNECTION_KEEP_ALIVE_ENABLED);
    connectionKeepAliveTimeOut = Math.max(1, conf.getInt(SHUFFLE_CONNECTION_KEEP_ALIVE_TIME_OUT, DEFAULT_SHUFFLE_CONNECTION_KEEP_ALIVE_TIME_OUT));
    mapOutputMetaInfoCacheSize = Math.max(1, conf.getInt(SHUFFLE_MAPOUTPUT_META_INFO_CACHE_SIZE, DEFAULT_SHUFFLE_MAPOUTPUT_META_INFO_CACHE_SIZE));
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) YarnConfiguration(org.apache.hadoop.yarn.conf.YarnConfiguration) JobTokenSecretManager(org.apache.hadoop.mapreduce.security.token.JobTokenSecretManager) InetSocketAddress(java.net.InetSocketAddress) Channel(org.jboss.netty.channel.Channel) JniDBFactory.asString(org.fusesource.leveldbjni.JniDBFactory.asString) ByteString(com.google.protobuf.ByteString) ServerBootstrap(org.jboss.netty.bootstrap.ServerBootstrap) ClosedChannelException(java.nio.channels.ClosedChannelException) IOException(java.io.IOException) ExecutionException(java.util.concurrent.ExecutionException) TooLongFrameException(org.jboss.netty.handler.codec.frame.TooLongFrameException) FileNotFoundException(java.io.FileNotFoundException) DBException(org.iq80.leveldb.DBException)
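For context, here is a minimal, self-contained sketch of the Netty 3.x bootstrap pattern serviceStart relies on: bind, then read the actual port back from the returned Channel. The executors and the empty pipeline are illustrative assumptions, not part of ShuffleHandler:

import java.net.InetSocketAddress;
import java.util.concurrent.Executors;

import org.jboss.netty.bootstrap.ServerBootstrap;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.ChannelPipeline;
import org.jboss.netty.channel.ChannelPipelineFactory;
import org.jboss.netty.channel.Channels;
import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory;

public class BootstrapSketch {

    public static void main(String[] args) {
        ServerBootstrap bootstrap = new ServerBootstrap(
            new NioServerSocketChannelFactory(
                Executors.newCachedThreadPool(),    // boss threads
                Executors.newCachedThreadPool()));  // worker threads
        bootstrap.setOption("backlog", 128);
        bootstrap.setOption("child.keepAlive", true);
        bootstrap.setPipelineFactory(new ChannelPipelineFactory() {

            @Override
            public ChannelPipeline getPipeline() {
                return Channels.pipeline(); // real handlers would be added here
            }
        });
        // Binding to port 0 asks the OS for an ephemeral port, which is what
        // the test above triggers by setting SHUFFLE_PORT_CONFIG_KEY to 0.
        Channel ch = bootstrap.bind(new InetSocketAddress(0));
        int port = ((InetSocketAddress) ch.getLocalAddress()).getPort();
        System.out.println("listening on port " + port);
    }
}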

Example 8 with Channel

Use of org.jboss.netty.channel.Channel in project storm by apache.

The class SaslStormClientHandler, method channelConnected:

@Override
public void channelConnected(ChannelHandlerContext ctx, ChannelStateEvent event) {
    // register the newly established channel
    Channel channel = ctx.getChannel();
    client.channelConnected(channel);
    try {
        SaslNettyClient saslNettyClient = SaslNettyClientState.getSaslNettyClient.get(channel);
        if (saslNettyClient == null) {
            LOG.debug("Creating saslNettyClient now " + "for channel: " + channel);
            saslNettyClient = new SaslNettyClient(name, token);
            SaslNettyClientState.getSaslNettyClient.set(channel, saslNettyClient);
        }
        LOG.debug("Sending SASL_TOKEN_MESSAGE_REQUEST");
        channel.write(ControlMessage.SASL_TOKEN_MESSAGE_REQUEST);
    } catch (Exception e) {
        LOG.error("Failed to authenticate with server " + "due to error: ", e);
    }
}
Also used : Channel(org.jboss.netty.channel.Channel) IOException(java.io.IOException)
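SaslNettyClientState.getSaslNettyClient above follows Netty 3.x's ChannelLocal pattern for attaching per-connection state to a Channel. A sketch of that pattern (illustrative; the real class holds a SaslNettyClient rather than Object):

import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.ChannelLocal;

public final class SaslClientStateSketch {

    // One value per Channel, similar to a ThreadLocal keyed by connection.
    public static final ChannelLocal<Object> SASL_STATE = new ChannelLocal<Object>();

    static Object getOrCreate(Channel channel) {
        Object state = SASL_STATE.get(channel);
        if (state == null) {
            state = new Object();            // a SaslNettyClient in the real handler
            SASL_STATE.set(channel, state);  // visible to later handlers on this channel
        }
        return state;
    }
}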

Example 9 with Channel

Use of org.jboss.netty.channel.Channel in project storm by apache.

The class SaslStormServerAuthorizeHandler, method messageReceived:

@Override
public void messageReceived(ChannelHandlerContext ctx, MessageEvent e) {
    Object msg = e.getMessage();
    if (msg == null)
        return;
    Channel channel = ctx.getChannel();
    LOG.debug("messageReceived: Checking whether the client is authorized to send messages to the server ");
    // Authorize: client is allowed to doRequest() if and only if the client
    // has successfully authenticated with this server.
    SaslNettyServer saslNettyServer = SaslNettyServerState.getSaslNettyServer.get(channel);
    if (saslNettyServer == null) {
        LOG.warn("messageReceived: This client is *NOT* authorized to perform " + "this action since there's no saslNettyServer to " + "authenticate the client: " + "refusing to perform requested action: " + msg);
        return;
    }
    if (!saslNettyServer.isComplete()) {
        LOG.warn("messageReceived: This client is *NOT* authorized to perform " + "this action because SASL authentication did not complete: " + "refusing to perform requested action: " + msg);
        // not authorized.
        return;
    }
    LOG.debug("messageReceived: authenticated client: " + saslNettyServer.getUserName() + " is authorized to do request " + "on server.");
    // We call fireMessageReceived since the client is allowed to perform
    // this request. The client's request will now proceed to the next
    // pipeline component.
    Channels.fireMessageReceived(ctx, msg);
}
Also used : Channel(org.jboss.netty.channel.Channel)
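Channels.fireMessageReceived hands the message to the next upstream handler in the pipeline, so returning without calling it silently drops the request. A hypothetical pipeline layout (not the actual Storm pipeline) illustrating that ordering:

import org.jboss.netty.channel.ChannelHandler;
import org.jboss.netty.channel.ChannelPipeline;
import org.jboss.netty.channel.Channels;

public class PipelineOrderSketch {

    // authorize: a gate such as the SaslStormServerAuthorizeHandler shown above
    // business:  a handler that should only ever see authorized messages
    static ChannelPipeline buildPipeline(ChannelHandler authorize, ChannelHandler business) {
        ChannelPipeline p = Channels.pipeline();
        p.addLast("authorize", authorize);  // drops unauthenticated traffic
        p.addLast("handler", business);     // receives only forwarded messages
        return p;
    }
}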

Example 10 with Channel

Use of org.jboss.netty.channel.Channel in project storm by apache.

The class PacemakerClient, method send:

public HBMessage send(HBMessage m) throws PacemakerConnectionException {
    LOG.debug("Sending message: {}", m.toString());
    try {
        int next = availableMessageSlots.take();
        synchronized (m) {
            m.set_message_id(next);
            messages[next] = m;
            LOG.debug("Put message in slot: {}", Integer.toString(next));
            do {
                waitUntilReady();
                Channel channel = channelRef.get();
                if (channel != null) {
                    channel.write(m);
                    m.wait(1000);
                }
            } while (messages[next] == m);
        }
        HBMessage ret = messages[next];
        if (ret == null) {
            // This can happen if we lost the connection and subsequently reconnected or timed out.
            send(m);
        }
        messages[next] = null;
        LOG.debug("Got Response: {}", ret);
        return ret;
    } catch (InterruptedException e) {
        LOG.error("PacemakerClient send interrupted: ", e);
        throw new RuntimeException(e);
    }
}
Also used : Channel(org.jboss.netty.channel.Channel) HBMessage(org.apache.storm.generated.HBMessage)
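The send method parks the caller in m.wait(1000) and loops until something other than the request sits in its slot. A hypothetical sketch of the response side of that slot pattern (assumed, not quoted from Storm; it presumes the Thrift-generated get_message_id accessor on HBMessage):

import java.util.concurrent.BlockingQueue;

import org.apache.storm.generated.HBMessage;

public class ResponseSlotSketch {

    // Called by the reader when a reply arrives: store it in the request's
    // slot and wake the sender blocked in m.wait(1000).
    static void completeSlot(HBMessage response, HBMessage[] messages,
                             BlockingQueue<Integer> availableMessageSlots) {
        int id = response.get_message_id();
        HBMessage request = messages[id];
        if (request != null) {
            synchronized (request) {
                messages[id] = response;   // ends the while (messages[next] == m) loop
                request.notifyAll();       // wakes the thread waiting on the request
            }
            availableMessageSlots.add(id); // slot is free for the next send()
        }
    }
}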

Aggregations

Channel (org.jboss.netty.channel.Channel) 176
InetSocketAddress (java.net.InetSocketAddress) 53
Test (org.junit.Test) 52
HttpRequest (org.jboss.netty.handler.codec.http.HttpRequest) 38
DefaultHttpRequest (org.jboss.netty.handler.codec.http.DefaultHttpRequest) 36
DefaultHttpResponse (org.jboss.netty.handler.codec.http.DefaultHttpResponse) 34
SocketAddress (java.net.SocketAddress) 33
HttpResponse (org.jboss.netty.handler.codec.http.HttpResponse) 33
ChannelFuture (org.jboss.netty.channel.ChannelFuture) 31
PrepareForTest (org.powermock.core.classloader.annotations.PrepareForTest) 30
ChannelPipeline (org.jboss.netty.channel.ChannelPipeline) 24
Test (org.testng.annotations.Test) 23
ConditionCheck (com.linkedin.databus2.test.ConditionCheck) 22
SimpleObjectCaptureHandler (com.linkedin.databus2.test.container.SimpleObjectCaptureHandler) 19
IOException (java.io.IOException) 19
Logger (org.apache.log4j.Logger) 19
ChannelBuffer (org.jboss.netty.buffer.ChannelBuffer) 17
DefaultHttpChunk (org.jboss.netty.handler.codec.http.DefaultHttpChunk) 15
ArrayList (java.util.ArrayList) 14
HttpChunk (org.jboss.netty.handler.codec.http.HttpChunk) 14