
Example 81 with HTableDescriptor

use of org.apache.hadoop.hbase.HTableDescriptor in project hbase by apache.

the class TestHCM method testGetOperationTimeout.

/**
   * Test that an operation can fail once the global operation timeout is reached, even if each
   * individual attempt stays within its own timeout. We do that with:
   * - client side: an operation timeout of 30 seconds
   * - server side: a sleep of 20 seconds on each attempt. The first attempt fails and the second
   * one succeeds, but the client won't wait that long: because 20 + 20 > 30, the client has
   * already timed out by the time the server answers.
   */
@Test
public void testGetOperationTimeout() throws Exception {
    HTableDescriptor hdt = TEST_UTIL.createTableDescriptor(TableName.valueOf(name.getMethodName()));
    hdt.addCoprocessor(SleepAndFailFirstTime.class.getName());
    Table table = TEST_UTIL.createTable(hdt, new byte[][] { FAM_NAM }, TEST_UTIL.getConfiguration());
    table.setRpcTimeout(Integer.MAX_VALUE);
    SleepAndFailFirstTime.ct.set(0);
    // Check that it works if the timeout is big enough
    table.setOperationTimeout(120 * 1000);
    table.get(new Get(FAM_NAM));
    // Resetting and retrying. Will fail this time, not enough time for the second try
    SleepAndFailFirstTime.ct.set(0);
    try {
        table.setOperationTimeout(30 * 1000);
        table.get(new Get(FAM_NAM));
        Assert.fail("We expect an exception here");
    } catch (SocketTimeoutException e) {
        // The client has a CallTimeout class, but it's not shared. We're not very clean today:
        // in the general case you can expect the call to stop, but the exact exception may vary.
        // In this test, however, we're sure that it will be a socket timeout.
        LOG.info("We received an exception, as expected ", e);
    } catch (IOException e) {
        Assert.fail("Wrong exception:" + e.getMessage());
    } finally {
        table.close();
    }
}
Also used : SocketTimeoutException(java.net.SocketTimeoutException) IOException(java.io.IOException) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) Test(org.junit.Test)
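
SleepAndFailFirstTime is an inner helper of TestHCM that this listing does not show. To make the timing argument in the Javadoc concrete, here is a minimal sketch of how such a coprocessor could look against the pre-2.0 RegionObserver API; the 20-second sleep, the use of BaseRegionObserver and preGetOp, and everything apart from the ct counter the test resets are assumptions, not the actual HBase test code.

import java.io.IOException;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;

import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.coprocessor.BaseRegionObserver;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;

/** Hypothetical sketch: sleeps on every Get and fails only the first attempt. */
public class SleepAndFailFirstTimeSketch extends BaseRegionObserver {
    // Attempt counter the test can reset between runs, mirroring SleepAndFailFirstTime.ct.set(0).
    public static final AtomicLong ct = new AtomicLong(0);
    // Assumed sleep of 20 seconds per attempt, matching the Javadoc's "20 + 20 > 30" argument.
    private static final long SLEEP_TIME = 20L * 1000L;

    @Override
    public void preGetOp(ObserverContext<RegionCoprocessorEnvironment> e, Get get,
            List<Cell> results) throws IOException {
        try {
            // Every attempt pays the 20-second server-side delay.
            Thread.sleep(SLEEP_TIME);
        } catch (InterruptedException ie) {
            Thread.currentThread().interrupt();
        }
        if (ct.incrementAndGet() == 1) {
            throw new IOException("Failing the first attempt on purpose");
        }
    }
}

In HBase 2.0 and later, BaseRegionObserver is gone, so a sketch like this would implement RegionObserver directly and rely on its default methods.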

Example 82 with HTableDescriptor

use of org.apache.hadoop.hbase.HTableDescriptor in project hbase by apache.

the class TestHCM method testIncrementRpcTimeout.

@Test
public void testIncrementRpcTimeout() throws Exception {
    HTableDescriptor hdt = TEST_UTIL.createTableDescriptor(TableName.valueOf(name.getMethodName()));
    hdt.addCoprocessor(SleepCoprocessor.class.getName());
    Configuration c = new Configuration(TEST_UTIL.getConfiguration());
    try (Table t = TEST_UTIL.createTable(hdt, new byte[][] { FAM_NAM }, c)) {
        t.setWriteRpcTimeout(SleepCoprocessor.SLEEP_TIME / 2);
        t.setOperationTimeout(SleepCoprocessor.SLEEP_TIME * 100);
        Increment i = new Increment(FAM_NAM);
        i.addColumn(FAM_NAM, FAM_NAM, 1);
        t.increment(i);
        fail("Write should not have succeeded");
    } catch (RetriesExhaustedException e) {
    // expected
    }
    // Again, with a configuration-based override
    c.setInt(HConstants.HBASE_RPC_WRITE_TIMEOUT_KEY, SleepCoprocessor.SLEEP_TIME / 2);
    try (Connection conn = ConnectionFactory.createConnection(c)) {
        try (Table t = conn.getTable(hdt.getTableName())) {
            Increment i = new Increment(FAM_NAM);
            i.addColumn(FAM_NAM, FAM_NAM, 1);
            t.increment(i);
            fail("Write should not have succeeded");
        } catch (RetriesExhaustedException e) {
        // expected
        }
    }
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) Test(org.junit.Test)

Example 83 with HTableDescriptor

use of org.apache.hadoop.hbase.HTableDescriptor in project hbase by apache.

the class TestHCM method testGetRpcTimeout.

@Test
public void testGetRpcTimeout() throws Exception {
    HTableDescriptor hdt = TEST_UTIL.createTableDescriptor(TableName.valueOf(name.getMethodName()));
    hdt.addCoprocessor(SleepCoprocessor.class.getName());
    Configuration c = new Configuration(TEST_UTIL.getConfiguration());
    try (Table t = TEST_UTIL.createTable(hdt, new byte[][] { FAM_NAM }, c)) {
        t.setReadRpcTimeout(SleepCoprocessor.SLEEP_TIME / 2);
        t.setOperationTimeout(SleepCoprocessor.SLEEP_TIME * 100);
        t.get(new Get(FAM_NAM));
        fail("Get should not have succeeded");
    } catch (RetriesExhaustedException e) {
    // expected
    }
    // Again, with a configuration-based override
    c.setInt(HConstants.HBASE_RPC_READ_TIMEOUT_KEY, SleepCoprocessor.SLEEP_TIME / 2);
    try (Connection conn = ConnectionFactory.createConnection(c)) {
        try (Table t = conn.getTable(hdt.getTableName())) {
            t.get(new Get(FAM_NAM));
            fail("Get should not have succeeded");
        } catch (RetriesExhaustedException e) {
        // expected
        }
    }
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) Test(org.junit.Test)
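
Examples 82 and 83 override a single RPC timeout at a time. As a minimal sketch of the same idea applied connection-wide, the read RPC, write RPC, and overall operation timeouts can all be set on the Configuration before the connection is created; the table name, row key, and millisecond values below are placeholders.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class TimeoutConfigSketch {
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        // Per-RPC ceilings for reads and writes (placeholder values).
        conf.setInt(HConstants.HBASE_RPC_READ_TIMEOUT_KEY, 2000);
        conf.setInt(HConstants.HBASE_RPC_WRITE_TIMEOUT_KEY, 2000);
        // Overall budget for one client operation, retries and backoff included.
        conf.setInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, 10000);

        try (Connection conn = ConnectionFactory.createConnection(conf);
             Table table = conn.getTable(TableName.valueOf("my_table"))) {
            // Every Table obtained from this connection inherits the timeouts above,
            // unless a per-table setter such as setReadRpcTimeout overrides them.
            table.get(new Get(Bytes.toBytes("row-1")));
        }
    }
}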

Example 84 with HTableDescriptor

use of org.apache.hadoop.hbase.HTableDescriptor in project hbase by apache.

the class TestHCM method testServerBusyException.

@Test()
public void testServerBusyException() throws Exception {
    HTableDescriptor hdt = TEST_UTIL.createTableDescriptor(TableName.valueOf(name.getMethodName()));
    hdt.addCoprocessor(SleepCoprocessor.class.getName());
    Configuration c = new Configuration(TEST_UTIL.getConfiguration());
    TEST_UTIL.createTable(hdt, new byte[][] { FAM_NAM }, c);
    TestGetThread tg1 = new TestGetThread(TEST_UTIL.getConnection().getTable(hdt.getTableName()));
    TestGetThread tg2 = new TestGetThread(TEST_UTIL.getConnection().getTable(hdt.getTableName()));
    TestGetThread tg3 = new TestGetThread(TEST_UTIL.getConnection().getTable(hdt.getTableName()));
    TestGetThread tg4 = new TestGetThread(TEST_UTIL.getConnection().getTable(hdt.getTableName()));
    TestGetThread tg5 = new TestGetThread(TEST_UTIL.getConnection().getTable(hdt.getTableName()));
    tg1.start();
    tg2.start();
    tg3.start();
    tg4.start();
    tg5.start();
    tg1.join();
    tg2.join();
    tg3.join();
    tg4.join();
    tg5.join();
    assertEquals(2, tg1.getServerBusyException + tg2.getServerBusyException + tg3.getServerBusyException + tg4.getServerBusyException + tg5.getServerBusyException);
    // Put has its own logic in HTable, so test Put on its own. Puts go through AsyncProcess
    // (a multi call at the RPC level), which wraps exceptions in RetriesExhaustedWithDetailsException.
    TestPutThread tp1 = new TestPutThread(TEST_UTIL.getConnection().getTable(hdt.getTableName()));
    TestPutThread tp2 = new TestPutThread(TEST_UTIL.getConnection().getTable(hdt.getTableName()));
    TestPutThread tp3 = new TestPutThread(TEST_UTIL.getConnection().getTable(hdt.getTableName()));
    TestPutThread tp4 = new TestPutThread(TEST_UTIL.getConnection().getTable(hdt.getTableName()));
    TestPutThread tp5 = new TestPutThread(TEST_UTIL.getConnection().getTable(hdt.getTableName()));
    tp1.start();
    tp2.start();
    tp3.start();
    tp4.start();
    tp5.start();
    tp1.join();
    tp2.join();
    tp3.join();
    tp4.join();
    tp5.join();
    assertEquals(2, tp1.getServerBusyException + tp2.getServerBusyException + tp3.getServerBusyException + tp4.getServerBusyException + tp5.getServerBusyException);
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) Test(org.junit.Test)
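
TestGetThread and TestPutThread are inner helpers of TestHCM that this listing does not show. Assuming each one simply records whether its single request was rejected with ServerTooBusyException, a sketch of the Get variant could look like the following; only the field name getServerBusyException, which the assertions above sum, is taken from the test.

import java.io.IOException;

import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.ipc.ServerTooBusyException;
import org.apache.hadoop.hbase.util.Bytes;

/** Hypothetical sketch of a thread that issues one Get and records a busy-server rejection. */
class TestGetThreadSketch extends Thread {
    private final Table table;
    // Summed across threads in the assertion above; 1 if this thread hit ServerTooBusyException.
    int getServerBusyException = 0;

    TestGetThreadSketch(Table table) {
        this.table = table;
    }

    @Override
    public void run() {
        try (Table t = table) {
            t.get(new Get(Bytes.toBytes("row-1")));   // placeholder row key
        } catch (ServerTooBusyException e) {
            // The per-server concurrent-request limit rejected this call.
            getServerBusyException = 1;
        } catch (IOException e) {
            // Other failures are ignored for the purpose of this sketch.
        }
    }
}

The assertion of exactly two rejections suggests the test caps concurrent requests per region server (hbase.client.perserver.requests.threshold) at three somewhere outside this excerpt, so two of the five parallel calls are turned away.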

Example 85 with HTableDescriptor

use of org.apache.hadoop.hbase.HTableDescriptor in project hbase by apache.

the class TestHCM method testRpcRetryingCallerSleep.

/**
   * Test that RpcRetryingCaller starts from index 0 when calculating the backoff time.
   */
@Test
public void testRpcRetryingCallerSleep() throws Exception {
    HTableDescriptor hdt = TEST_UTIL.createTableDescriptor(TableName.valueOf(name.getMethodName()));
    hdt.addCoprocessorWithSpec("|" + SleepAndFailFirstTime.class.getName() + "||" + SleepAndFailFirstTime.SLEEP_TIME_CONF_KEY + "=2000");
    TEST_UTIL.createTable(hdt, new byte[][] { FAM_NAM }).close();
    Configuration c = new Configuration(TEST_UTIL.getConfiguration());
    SleepAndFailFirstTime.ct.set(0);
    c.setInt(HConstants.HBASE_CLIENT_PAUSE, 3000);
    c.setInt(HConstants.HBASE_RPC_TIMEOUT_KEY, 4000);
    Connection connection = ConnectionFactory.createConnection(c);
    Table table = connection.getTable(TableName.valueOf(name.getMethodName()));
    table.setOperationTimeout(8000);
    // Check that it works, because 2s + 3s * RETRY_BACKOFF[0] + 2s < 8s.
    table.get(new Get(FAM_NAM));
    // Resetting and retrying.
    SleepAndFailFirstTime.ct.set(0);
    try {
        table.setOperationTimeout(6000);
        // Will fail this time: after the sleep there is not enough time for the second retry,
        // because 2s + 3s + 2s > 6s.
        table.get(new Get(FAM_NAM));
        Assert.fail("We expect an exception here");
    } catch (SocketTimeoutException e) {
        LOG.info("We received an exception, as expected ", e);
    } catch (IOException e) {
        Assert.fail("Wrong exception:" + e.getMessage());
    } finally {
        table.close();
        connection.close();
    }
}
Also used : SocketTimeoutException(java.net.SocketTimeoutException) Configuration(org.apache.hadoop.conf.Configuration) IOException(java.io.IOException) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) Test(org.junit.Test)
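
The comments in example 85 compress the timing argument. A back-of-the-envelope sketch of the same arithmetic, assuming the retry sleep is pause * HConstants.RETRY_BACKOFF[attempt] with the first retry indexed at 0 and ignoring the small jitter the real caller adds:

import org.apache.hadoop.hbase.HConstants;

public class RetryBackoffArithmetic {
    public static void main(String[] args) {
        long sleepPerAttempt = 2000;   // SLEEP_TIME_CONF_KEY value passed in the coprocessor spec
        long pause = 3000;             // HConstants.HBASE_CLIENT_PAUSE as set in the test

        // RETRY_BACKOFF[0] == 1, so the first retry waits pause * 1 = 3s when indexing starts at 0.
        long firstBackoff = pause * HConstants.RETRY_BACKOFF[0];
        long twoAttempts = sleepPerAttempt + firstBackoff + sleepPerAttempt;  // 2s + 3s + 2s = 7s

        System.out.println(twoAttempts < 8000);  // true:  an 8s operation timeout leaves room for the retry
        System.out.println(twoAttempts < 6000);  // false: a 6s operation timeout does not
    }
}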

Aggregations

HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor) 867
HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor) 555
Test (org.junit.Test) 425
TableName (org.apache.hadoop.hbase.TableName) 258
HRegionInfo (org.apache.hadoop.hbase.HRegionInfo) 171
IOException (java.io.IOException) 167
Put (org.apache.hadoop.hbase.client.Put) 149
Table (org.apache.hadoop.hbase.client.Table) 134
Path (org.apache.hadoop.fs.Path) 127
Admin (org.apache.hadoop.hbase.client.Admin) 121
Configuration (org.apache.hadoop.conf.Configuration) 87
HBaseAdmin (org.apache.hadoop.hbase.client.HBaseAdmin) 77
ArrayList (java.util.ArrayList) 75
FileSystem (org.apache.hadoop.fs.FileSystem) 66
Result (org.apache.hadoop.hbase.client.Result) 62
Connection (org.apache.hadoop.hbase.client.Connection) 57
Scan (org.apache.hadoop.hbase.client.Scan) 51
Cell (org.apache.hadoop.hbase.Cell) 44
Delete (org.apache.hadoop.hbase.client.Delete) 44
HRegion (org.apache.hadoop.hbase.regionserver.HRegion) 43