
Example 1 with EventLoop

Use of org.apache.hbase.thirdparty.io.netty.channel.EventLoop in project hbase by apache.

From class TestSaslFanOutOneBlockAsyncDFSOutput, method test.

private void test(Path file) throws IOException, InterruptedException, ExecutionException {
    // take one event loop from the shared group to drive this output
    EventLoop eventLoop = EVENT_LOOP_GROUP.next();
    // arguments after the path: overwrite = true, createParent = false, replication = 3, default block size
    FanOutOneBlockAsyncDFSOutput out = FanOutOneBlockAsyncDFSOutputHelper.createOutput(FS, file, true, false, (short) 3, FS.getDefaultBlockSize(), eventLoop, CHANNEL_CLASS, MONITOR);
    TestFanOutOneBlockAsyncDFSOutput.writeAndVerify(FS, file, out);
}
Also used : EventLoop(org.apache.hbase.thirdparty.io.netty.channel.EventLoop)
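
The writeAndVerify helper lives in TestFanOutOneBlockAsyncDFSOutput and is not shown in this excerpt. As a rough illustration only, a round trip of the kind it presumably performs can be pieced together from calls that do appear in these examples; the writeAndReadBack name and the 4096-byte size are made up for the sketch, and the real helper may differ.

// Sketch only: write, flush, close, then read the data back and compare.
private static void writeAndReadBack(Path file, FanOutOneBlockAsyncDFSOutput out) throws Exception {
    byte[] expected = new byte[4096];
    ThreadLocalRandom.current().nextBytes(expected);
    out.write(expected, 0, expected.length);
    // block until the data is acked by the datanodes
    out.flush(false).get();
    out.close();
    byte[] actual = new byte[expected.length];
    try (FSDataInputStream in = FS.open(file)) {
        in.readFully(actual);
    }
    assertArrayEquals(expected, actual);
}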

Example 2 with EventLoop

Use of org.apache.hbase.thirdparty.io.netty.channel.EventLoop in project hbase by apache.

From class TestFanOutOneBlockAsyncDFSOutputHang, method setUp.

@BeforeClass
public static void setUp() throws Exception {
    // bring up a mini DFS cluster with 2 datanodes for the test
    startMiniDFSCluster(2);
    FS = CLUSTER.getFileSystem();
    // netty event loop group and channel class used by the fan-out output
    EVENT_LOOP_GROUP = new NioEventLoopGroup();
    CHANNEL_CLASS = NioSocketChannel.class;
    MONITOR = StreamSlowMonitor.create(UTIL.getConfiguration(), "testMonitor");
    Path f = new Path("/testHang");
    EventLoop eventLoop = EVENT_LOOP_GROUP.next();
    // replication = 2, so both datanodes end up in the write pipeline
    OUT = FanOutOneBlockAsyncDFSOutputHelper.createOutput(FS, f, true, false, (short) 2, FS.getDefaultBlockSize(), eventLoop, CHANNEL_CLASS, MONITOR);
}
Also used : Path(org.apache.hadoop.fs.Path), EventLoop(org.apache.hbase.thirdparty.io.netty.channel.EventLoop), NioEventLoopGroup(org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoopGroup), BeforeClass(org.junit.BeforeClass)
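
The excerpt does not show how the resources allocated in setUp are released. Below is a possible matching tearDown, sketched only from calls that appear elsewhere in these examples plus Netty's standard EventLoopGroup.shutdownGracefully(); the shutdownMiniDFSCluster() helper name is an assumption mirroring startMiniDFSCluster() and may not match the real base class.

// Sketch only: release the output stream, the netty event loop group, and the mini cluster.
@AfterClass
public static void tearDown() throws Exception {
    if (OUT != null) {
        // recoverAndClose (shown in Example 3) closes the output even if the pipeline is broken
        OUT.recoverAndClose(null);
    }
    if (EVENT_LOOP_GROUP != null) {
        EVENT_LOOP_GROUP.shutdownGracefully();
    }
    // assumed counterpart of startMiniDFSCluster() in the test base class
    shutdownMiniDFSCluster();
}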

Example 3 with EventLoop

Use of org.apache.hbase.thirdparty.io.netty.channel.EventLoop in project hbase by apache.

From class TestFanOutOneBlockAsyncDFSOutput, method testRecover.

@Test
public void testRecover() throws IOException, InterruptedException, ExecutionException {
    Path f = new Path("/" + name.getMethodName());
    EventLoop eventLoop = EVENT_LOOP_GROUP.next();
    FanOutOneBlockAsyncDFSOutput out = FanOutOneBlockAsyncDFSOutputHelper.createOutput(FS, f, true, false, (short) 3, FS.getDefaultBlockSize(), eventLoop, CHANNEL_CLASS, MONITOR);
    byte[] b = new byte[10];
    ThreadLocalRandom.current().nextBytes(b);
    out.write(b, 0, b.length);
    out.flush(false).get();
    // restart one datanode, which breaks one of the pipeline connections
    CLUSTER.restartDataNode(0);
    out.write(b, 0, b.length);
    try {
        out.flush(false).get();
        fail("flush should fail");
    } catch (ExecutionException e) {
        // we restarted one datanode so the flush should fail
        LOG.info("expected exception caught", e);
    }
    out.recoverAndClose(null);
    assertEquals(b.length, FS.getFileStatus(f).getLen());
    byte[] actual = new byte[b.length];
    try (FSDataInputStream in = FS.open(f)) {
        in.readFully(actual);
    }
    assertArrayEquals(b, actual);
}
Also used : Path(org.apache.hadoop.fs.Path), EventLoop(org.apache.hbase.thirdparty.io.netty.channel.EventLoop), FSDataInputStream(org.apache.hadoop.fs.FSDataInputStream), ExecutionException(java.util.concurrent.ExecutionException), Test(org.junit.Test)
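
The test blocks on the flush result with get(). Judging by the CompletableFuture entry in the Aggregations list below, flush(boolean) appears to return a CompletableFuture, so a caller could also attach a callback instead of blocking. A minimal sketch under that assumption (the flushAsync helper name is made up):

// Sketch only: non-blocking handling of the flush result, assuming flush(boolean)
// returns a CompletableFuture as the Aggregations list below suggests.
private static void flushAsync(FanOutOneBlockAsyncDFSOutput out) {
    out.flush(false).whenComplete((ackedLength, error) -> {
        if (error != null) {
            LOG.warn("flush failed, the pipeline is probably broken", error);
        } else {
            LOG.info("flush acked, length = {}", ackedLength);
        }
    });
}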

Example 4 with EventLoop

Use of org.apache.hbase.thirdparty.io.netty.channel.EventLoop in project hbase by apache.

From class TestFanOutOneBlockAsyncDFSOutput, method testConnectToDatanodeFailed.

@Test
public void testConnectToDatanodeFailed() throws IOException, ClassNotFoundException, NoSuchMethodException, IllegalAccessException, InvocationTargetException, InterruptedException, NoSuchFieldException {
    Field xceiverServerDaemonField = DataNode.class.getDeclaredField("dataXceiverServer");
    xceiverServerDaemonField.setAccessible(true);
    Class<?> xceiverServerClass = Class.forName("org.apache.hadoop.hdfs.server.datanode.DataXceiverServer");
    Method numPeersMethod = xceiverServerClass.getDeclaredMethod("getNumPeers");
    numPeersMethod.setAccessible(true);
    // stop one datanode so that connecting to it fails
    DataNodeProperties dnProp = CLUSTER.stopDataNode(0);
    Path f = new Path("/test");
    EventLoop eventLoop = EVENT_LOOP_GROUP.next();
    try (FanOutOneBlockAsyncDFSOutput output = FanOutOneBlockAsyncDFSOutputHelper.createOutput(FS, f, true, false, (short) 3, FS.getDefaultBlockSize(), eventLoop, CHANNEL_CLASS, MONITOR)) {
        // the dead datanode should be excluded on retry, so only 2 DNs end up in the pipeline
        assertEquals(2, output.getPipeline().length);
    } finally {
        CLUSTER.restartDataNode(dnProp);
    }
}
Also used : Path(org.apache.hadoop.fs.Path), Field(java.lang.reflect.Field), EventLoop(org.apache.hbase.thirdparty.io.netty.channel.EventLoop), DataNodeProperties(org.apache.hadoop.hdfs.MiniDFSCluster.DataNodeProperties), Method(java.lang.reflect.Method), Test(org.junit.Test)
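
The reflection handles prepared at the top of this test (xceiverServerDaemonField, numPeersMethod) are never invoked in the excerpt. The sketch below shows how they could be used to read a datanode's live peer count; it assumes the dataXceiverServer field holds an org.apache.hadoop.util.Daemon whose runnable is the DataXceiverServer, which is suggested by the variable name but not confirmed by the excerpt.

// Sketch only: read a datanode's active peer count through the reflected handles.
// Assumes dataXceiverServer is a Daemon wrapping the DataXceiverServer runnable.
private static int getNumPeers(DataNode dn, Field xceiverServerDaemonField, Method numPeersMethod) throws Exception {
    Daemon daemon = (Daemon) xceiverServerDaemonField.get(dn);
    return (Integer) numPeersMethod.invoke(daemon.getRunnable());
}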

Example 5 with EventLoop

Use of org.apache.hbase.thirdparty.io.netty.channel.EventLoop in project hbase by apache.

From class TestFanOutOneBlockAsyncDFSOutput, method testCreateParentFailed.

/**
 * This is important for fencing when recovering from an RS crash.
 */
@Test
public void testCreateParentFailed() throws IOException {
    Path f = new Path("/" + name.getMethodName() + "/test");
    EventLoop eventLoop = EVENT_LOOP_GROUP.next();
    try {
        FanOutOneBlockAsyncDFSOutputHelper.createOutput(FS, f, true, false, (short) 3, FS.getDefaultBlockSize(), eventLoop, CHANNEL_CLASS, MONITOR);
        fail("should fail with parent does not exist");
    } catch (RemoteException e) {
        LOG.info("expected exception caught", e);
        assertThat(e.unwrapRemoteException(), instanceOf(FileNotFoundException.class));
    }
}
Also used : Path(org.apache.hadoop.fs.Path), EventLoop(org.apache.hbase.thirdparty.io.netty.channel.EventLoop), RemoteException(org.apache.hadoop.ipc.RemoteException), Test(org.junit.Test)
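
For contrast, the same createOutput call succeeds once the parent directory exists. A sketch of that success path, using the standard FileSystem.mkdirs API to create the parent first; the test method name and the pipeline-length assertion (3 datanodes in this suite) are assumptions for the sketch.

// Sketch only: create the parent up front so createOutput (with createParent = false) succeeds.
@Test
public void testCreateWithParentPresent() throws Exception {
    Path f = new Path("/" + name.getMethodName() + "/test");
    FS.mkdirs(f.getParent());
    EventLoop eventLoop = EVENT_LOOP_GROUP.next();
    try (FanOutOneBlockAsyncDFSOutput out = FanOutOneBlockAsyncDFSOutputHelper.createOutput(FS, f, true, false, (short) 3, FS.getDefaultBlockSize(), eventLoop, CHANNEL_CLASS, MONITOR)) {
        // with the parent present, createOutput no longer throws; all replicas join the pipeline
        assertEquals(3, out.getPipeline().length);
    }
}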

Aggregations

EventLoop (org.apache.hbase.thirdparty.io.netty.channel.EventLoop): 11
Path (org.apache.hadoop.fs.Path): 8
Test (org.junit.Test): 8
IOException (java.io.IOException): 2
Field (java.lang.reflect.Field): 2
Method (java.lang.reflect.Method): 2
FSDataInputStream (org.apache.hadoop.fs.FSDataInputStream): 2
DataNodeProperties (org.apache.hadoop.hdfs.MiniDFSCluster.DataNodeProperties): 2
InvocationTargetException (java.lang.reflect.InvocationTargetException): 1
CompletableFuture (java.util.concurrent.CompletableFuture): 1
ExecutionException (java.util.concurrent.ExecutionException): 1
TimeoutException (java.util.concurrent.TimeoutException): 1
MutableInt (org.apache.commons.lang3.mutable.MutableInt): 1
TimeoutIOException (org.apache.hadoop.hbase.exceptions.TimeoutIOException): 1
ExcludeDatanodeManager (org.apache.hadoop.hbase.io.asyncfs.monitor.ExcludeDatanodeManager): 1
StreamSlowMonitor (org.apache.hadoop.hbase.io.asyncfs.monitor.StreamSlowMonitor): 1
RemoteException (org.apache.hadoop.ipc.RemoteException): 1
DefaultEventLoop (org.apache.hbase.thirdparty.io.netty.channel.DefaultEventLoop): 1
NioEventLoopGroup (org.apache.hbase.thirdparty.io.netty.channel.nio.NioEventLoopGroup): 1
BeforeClass (org.junit.BeforeClass): 1