
Example 96 with TableDescriptor

Use of org.apache.hadoop.hbase.client.TableDescriptor in project hbase by Apache.

From the class TestRSGroupsBalance, method setupBalanceTest:

private ServerName setupBalanceTest(String newGroupName, TableName tableName) throws Exception {
    addGroup(newGroupName, 3);
    ADMIN.createNamespace(NamespaceDescriptor.create(tableName.getNamespaceAsString()).addConfiguration(RSGroupInfo.NAMESPACE_DESC_PROP_GROUP, newGroupName).build());
    final TableDescriptor desc = TableDescriptorBuilder.newBuilder(tableName).setColumnFamily(ColumnFamilyDescriptorBuilder.of("f")).build();
    byte[] startKey = Bytes.toBytes("aaaaa");
    byte[] endKey = Bytes.toBytes("zzzzz");
    ADMIN.createTable(desc, startKey, endKey, 6);
    TEST_UTIL.waitFor(WAIT_TIMEOUT, new Waiter.Predicate<Exception>() {

        @Override
        public boolean evaluate() throws Exception {
            List<String> regions = getTableRegionMap().get(tableName);
            if (regions == null) {
                return false;
            }
            return regions.size() >= 6;
        }
    });
    // make assignment uneven, move all regions to one server
    Map<ServerName, List<String>> assignMap = getTableServerRegionMap().get(tableName);
    final ServerName first = assignMap.entrySet().iterator().next().getKey();
    for (RegionInfo region : ADMIN.getRegions(tableName)) {
        if (!assignMap.get(first).contains(region.getRegionNameAsString())) {
            ADMIN.move(region.getEncodedNameAsBytes(), first);
        }
    }
    TEST_UTIL.waitFor(WAIT_TIMEOUT, new Waiter.Predicate<Exception>() {

        @Override
        public boolean evaluate() throws Exception {
            Map<ServerName, List<String>> map = getTableServerRegionMap().get(tableName);
            if (map == null) {
                return true;
            }
            List<String> regions = map.get(first);
            if (regions == null) {
                return true;
            }
            return regions.size() >= 6;
        }
    });
    return first;
}
Also used: ServerName (org.apache.hadoop.hbase.ServerName), List (java.util.List), RegionInfo (org.apache.hadoop.hbase.client.RegionInfo), Waiter (org.apache.hadoop.hbase.Waiter), Map (java.util.Map), TableDescriptor (org.apache.hadoop.hbase.client.TableDescriptor)
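
As a side note, Waiter.Predicate declares a single evaluate() method, so the two anonymous classes above can be written as lambdas on Java 8+. A minimal sketch reusing the identifiers from the test (an equivalent formulation, not a change to the original source):

// Sketch only: the same wait conditions expressed as lambdas instead of anonymous classes.
TEST_UTIL.waitFor(WAIT_TIMEOUT, () -> {
    List<String> regions = getTableRegionMap().get(tableName);
    return regions != null && regions.size() >= 6;
});
TEST_UTIL.waitFor(WAIT_TIMEOUT, () -> {
    Map<ServerName, List<String>> map = getTableServerRegionMap().get(tableName);
    if (map == null) {
        return true;
    }
    List<String> regions = map.get(first);
    return regions == null || regions.size() >= 6;
});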

Example 97 with TableDescriptor

Use of org.apache.hadoop.hbase.client.TableDescriptor in project hbase by Apache.

From the class TestThriftHBaseServiceHandler, method testMetricsPrecision:

/**
 * See HBASE-17611
 *
 * Latency metrics were capped at ~ 2 seconds due to the use of an int variable to capture the
 * duration.
 */
@Test
public void testMetricsPrecision() throws Exception {
    byte[] rowkey = Bytes.toBytes("row1");
    byte[] family = Bytes.toBytes("f");
    byte[] col = Bytes.toBytes("c");
    // create a table with a coprocessor that delays get requests
    TableName tableName = TableName.valueOf("testMetricsPrecision");
    TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(tableName).setCoprocessor(DelayingRegionObserver.class.getName()).setColumnFamily(ColumnFamilyDescriptorBuilder.of(family)).build();
    Table table = null;
    try {
        table = UTIL.createTable(tableDescriptor, null);
        table.put(new Put(rowkey).addColumn(family, col, Bytes.toBytes("val1")));
        ThriftHBaseServiceHandler hbaseHandler = createHandler();
        ThriftMetrics metrics = getMetrics(UTIL.getConfiguration());
        THBaseService.Iface handler = HbaseHandlerMetricsProxy.newInstance(hbaseHandler, metrics, null);
        ByteBuffer tTableName = wrap(tableName.getName());
        // check metrics latency with a successful get
        TGet tGet = new TGet(wrap(rowkey));
        TResult tResult = handler.get(tTableName, tGet);
        List<TColumnValue> expectedColumnValues = Lists.newArrayList(new TColumnValue(wrap(family), wrap(col), wrap(Bytes.toBytes("val1"))));
        assertArrayEquals(rowkey, tResult.getRow());
        List<TColumnValue> returnedColumnValues = tResult.getColumnValues();
        assertTColumnValuesEqual(expectedColumnValues, returnedColumnValues);
        metricsHelper.assertGaugeGt("get_max", 3000L, metrics.getSource());
    } finally {
        if (table != null) {
            try {
                table.close();
            } catch (IOException ignored) {
            }
            UTIL.deleteTable(tableName);
        }
    }
}
Also used: Table (org.apache.hadoop.hbase.client.Table), TGet (org.apache.hadoop.hbase.thrift2.generated.TGet), InterruptedIOException (java.io.InterruptedIOException), IOException (java.io.IOException), TColumnValue (org.apache.hadoop.hbase.thrift2.generated.TColumnValue), ByteBuffer (java.nio.ByteBuffer), TTableDescriptor (org.apache.hadoop.hbase.thrift2.generated.TTableDescriptor), TableDescriptor (org.apache.hadoop.hbase.client.TableDescriptor), TPut (org.apache.hadoop.hbase.thrift2.generated.TPut), Put (org.apache.hadoop.hbase.client.Put), TResult (org.apache.hadoop.hbase.thrift2.generated.TResult), TableName (org.apache.hadoop.hbase.TableName), TTableName (org.apache.hadoop.hbase.thrift2.generated.TTableName), ThriftMetrics (org.apache.hadoop.hbase.thrift.ThriftMetrics), THBaseService (org.apache.hadoop.hbase.thrift2.generated.THBaseService), Test (org.junit.Test)
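
The HBASE-17611 note in the Javadoc above is essentially an integer-overflow problem: a call duration measured in nanoseconds and held in an int caps out at Integer.MAX_VALUE nanoseconds, which is roughly 2.1 seconds and matches the reported cap. A standalone sketch of the arithmetic (illustration only, not HBase code):

// Sketch: why an int nanosecond duration saturates near ~2.1 seconds.
public class DurationCapDemo {
    public static void main(String[] args) {
        // Integer.MAX_VALUE nanoseconds expressed in seconds: about 2.147 s.
        System.out.println(Integer.MAX_VALUE / 1_000_000_000.0);
        // A 3-second latency measured in nanoseconds no longer fits in an int;
        // the narrowing cast silently wraps to a meaningless (negative) value.
        long threeSecondsNanos = 3_000_000_000L;
        System.out.println((int) threeSecondsNanos);
    }
}

That is why the test above injects a delaying coprocessor and then asserts that the recorded get_max gauge exceeds 3000, a value the old capped metric could never have reported.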

Example 98 with TableDescriptor

Use of org.apache.hadoop.hbase.client.TableDescriptor in project hbase by Apache.

From the class TestThriftHBaseServiceHandler, method beforeClass:

@BeforeClass
public static void beforeClass() throws Exception {
    UTIL.getConfiguration().set("hbase.client.retries.number", "3");
    UTIL.getConfiguration().setBoolean("hbase.regionserver.slowlog.buffer.enabled", true);
    UTIL.getConfiguration().set("hbase.client.retries.number", "3");
    UTIL.getConfiguration().setBoolean("hbase.security.authorization", true);
    UTIL.getConfiguration().set("hbase.coprocessor.master.classes", "org.apache.hadoop.hbase.security.access.AccessController");
    UTIL.getConfiguration().set("hbase.coprocessor.region.classes", "org.apache.hadoop.hbase.security.access.AccessController");
    UTIL.getConfiguration().set("hbase.coprocessor.regionserver.classes", "org.apache.hadoop.hbase.security.access.AccessController");
    // as we opened access control, we need to start as a superuser. Otherwise, we will not have
    // sufficient permission to do operations.
    UTIL.getConfiguration().set("hbase.superuser", System.getProperty("user.name"));
    UTIL.startMiniCluster();
    TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(TableName.valueOf(tableAname)).setColumnFamilies(Arrays.asList(families)).build();
    try (Admin admin = UTIL.getAdmin()) {
        admin.createTable(tableDescriptor);
    }
}
Also used: Admin (org.apache.hadoop.hbase.client.Admin), TTableDescriptor (org.apache.hadoop.hbase.thrift2.generated.TTableDescriptor), TableDescriptor (org.apache.hadoop.hbase.client.TableDescriptor), BeforeClass (org.junit.BeforeClass)
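
The families collection passed to setColumnFamilies is a field defined elsewhere in the test class and is not shown here. A hedged sketch of how such a set of column families could be built with ColumnFamilyDescriptorBuilder, using names and version counts chosen only to match the assertions in Example 100 (they are assumptions, not the actual field):

// Hypothetical sketch of the column-family descriptors; the real 'families' field
// lives elsewhere in TestThriftHBaseServiceHandler and may differ.
byte[] familyAname = Bytes.toBytes("fa");  // assumed name
byte[] familyBname = Bytes.toBytes("fb");  // assumed name
ColumnFamilyDescriptor[] families = new ColumnFamilyDescriptor[] {
    ColumnFamilyDescriptorBuilder.newBuilder(familyAname).setMaxVersions(3).build(),
    ColumnFamilyDescriptorBuilder.newBuilder(familyBname).setMaxVersions(2).build()
};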

Example 99 with TableDescriptor

Use of org.apache.hadoop.hbase.client.TableDescriptor in project hbase by Apache.

From the class TestThriftHBaseServiceHandler, method testMetricsWithException:

@Test
public void testMetricsWithException() throws Exception {
    byte[] rowkey = Bytes.toBytes("row1");
    byte[] family = Bytes.toBytes("f");
    byte[] col = Bytes.toBytes("c");
    // create a table which will throw exceptions for requests
    TableName tableName = TableName.valueOf(name.getMethodName());
    TableDescriptor tableDesc = TableDescriptorBuilder.newBuilder(tableName).setCoprocessor(ErrorThrowingGetObserver.class.getName()).setColumnFamily(ColumnFamilyDescriptorBuilder.of(family)).build();
    Table table = UTIL.createTable(tableDesc, null);
    table.put(new Put(rowkey).addColumn(family, col, Bytes.toBytes("val1")));
    ThriftHBaseServiceHandler hbaseHandler = createHandler();
    ThriftMetrics metrics = getMetrics(UTIL.getConfiguration());
    THBaseService.Iface handler = HbaseHandlerMetricsProxy.newInstance(hbaseHandler, metrics, null);
    ByteBuffer tTableName = wrap(tableName.getName());
    // check metrics increment with a successful get
    long preGetCounter = metricsHelper.checkCounterExists("get_num_ops", metrics.getSource()) ? metricsHelper.getCounter("get_num_ops", metrics.getSource()) : 0;
    TGet tGet = new TGet(wrap(rowkey));
    TResult tResult = handler.get(tTableName, tGet);
    List<TColumnValue> expectedColumnValues = Lists.newArrayList(new TColumnValue(wrap(family), wrap(col), wrap(Bytes.toBytes("val1"))));
    assertArrayEquals(rowkey, tResult.getRow());
    List<TColumnValue> returnedColumnValues = tResult.getColumnValues();
    assertTColumnValuesEqual(expectedColumnValues, returnedColumnValues);
    metricsHelper.assertCounter("get_num_ops", preGetCounter + 1, metrics.getSource());
    // check metrics increment when the get throws each exception type
    for (ErrorThrowingGetObserver.ErrorType type : ErrorThrowingGetObserver.ErrorType.values()) {
        testExceptionType(handler, metrics, tTableName, rowkey, type);
    }
}
Also used: ErrorThrowingGetObserver (org.apache.hadoop.hbase.thrift.ErrorThrowingGetObserver), Table (org.apache.hadoop.hbase.client.Table), TGet (org.apache.hadoop.hbase.thrift2.generated.TGet), TColumnValue (org.apache.hadoop.hbase.thrift2.generated.TColumnValue), ByteBuffer (java.nio.ByteBuffer), TTableDescriptor (org.apache.hadoop.hbase.thrift2.generated.TTableDescriptor), TableDescriptor (org.apache.hadoop.hbase.client.TableDescriptor), TPut (org.apache.hadoop.hbase.thrift2.generated.TPut), Put (org.apache.hadoop.hbase.client.Put), TResult (org.apache.hadoop.hbase.thrift2.generated.TResult), TableName (org.apache.hadoop.hbase.TableName), TTableName (org.apache.hadoop.hbase.thrift2.generated.TTableName), ThriftMetrics (org.apache.hadoop.hbase.thrift.ThriftMetrics), THBaseService (org.apache.hadoop.hbase.thrift2.generated.THBaseService), Test (org.junit.Test)

Example 100 with TableDescriptor

Use of org.apache.hadoop.hbase.client.TableDescriptor in project hbase by Apache.

From the class TestThriftHBaseServiceHandler, method testGetTableDescriptor:

@Test
public void testGetTableDescriptor() throws Exception {
    ThriftHBaseServiceHandler handler = createHandler();
    TTableDescriptor tableDescriptor = handler.getTableDescriptor(ThriftUtilities.tableNameFromHBase(TableName.valueOf(tableAname)));
    TableDescriptor table = ThriftUtilities.tableDescriptorFromThrift(tableDescriptor);
    assertTrue(table.getTableName().equals(TableName.valueOf(tableAname)));
    assertTrue(table.getColumnFamilies().length == 2);
    assertTrue(table.getColumnFamily(familyAname).getMaxVersions() == 3);
    assertTrue(table.getColumnFamily(familyBname).getMaxVersions() == 2);
}
Also used: TTableDescriptor (org.apache.hadoop.hbase.thrift2.generated.TTableDescriptor), TableDescriptor (org.apache.hadoop.hbase.client.TableDescriptor), Test (org.junit.Test)
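
ThriftUtilities also converts in the opposite direction, so the same API can be exercised as a round trip. A minimal sketch, assuming tableDescriptorFromHBase is the counterpart of the tableDescriptorFromThrift call used above (names of the assumed table and family are illustrative only):

// Sketch: round-tripping a descriptor through the Thrift representation (assumed API).
TableDescriptor original = TableDescriptorBuilder.newBuilder(TableName.valueOf("t"))
    .setColumnFamily(ColumnFamilyDescriptorBuilder.of("f")).build();
TTableDescriptor thrift = ThriftUtilities.tableDescriptorFromHBase(original);
TableDescriptor roundTripped = ThriftUtilities.tableDescriptorFromThrift(thrift);
assertEquals(original.getTableName(), roundTripped.getTableName());
assertEquals(original.getColumnFamilies().length, roundTripped.getColumnFamilies().length);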

Aggregations

TableDescriptor (org.apache.hadoop.hbase.client.TableDescriptor): 639
Test (org.junit.Test): 356
TableName (org.apache.hadoop.hbase.TableName): 237
RegionInfo (org.apache.hadoop.hbase.client.RegionInfo): 180
IOException (java.io.IOException): 151
Put (org.apache.hadoop.hbase.client.Put): 142
Admin (org.apache.hadoop.hbase.client.Admin): 136
Path (org.apache.hadoop.fs.Path): 124
Table (org.apache.hadoop.hbase.client.Table): 121
ColumnFamilyDescriptor (org.apache.hadoop.hbase.client.ColumnFamilyDescriptor): 96
Configuration (org.apache.hadoop.conf.Configuration): 91
TableDescriptorBuilder (org.apache.hadoop.hbase.client.TableDescriptorBuilder): 77
ArrayList (java.util.ArrayList): 75
FileSystem (org.apache.hadoop.fs.FileSystem): 66
Result (org.apache.hadoop.hbase.client.Result): 66
HRegion (org.apache.hadoop.hbase.regionserver.HRegion): 64
Connection (org.apache.hadoop.hbase.client.Connection): 59
Scan (org.apache.hadoop.hbase.client.Scan): 50
Get (org.apache.hadoop.hbase.client.Get): 49
List (java.util.List): 39