Use of org.apache.hadoop.hbase.client.TableDescriptor in project hbase by apache.
From the class TestRSGroupsBalance, method setupBalanceTest.
/**
 * Creates a table inside a freshly added RSGroup and deliberately skews its assignment so
 * that a single region server hosts every region; balance tests start from this state.
 *
 * @param newGroupName name of the RSGroup to create (backed by 3 servers)
 * @param tableName table to create; its namespace is bound to the new group
 * @return the server that ends up hosting all regions of the table
 * @throws Exception if creation or the wait conditions fail
 */
private ServerName setupBalanceTest(String newGroupName, TableName tableName) throws Exception {
  addGroup(newGroupName, 3);
  // Bind the table's namespace to the new group so its regions land on the group's servers.
  ADMIN.createNamespace(NamespaceDescriptor.create(tableName.getNamespaceAsString())
    .addConfiguration(RSGroupInfo.NAMESPACE_DESC_PROP_GROUP, newGroupName).build());
  final TableDescriptor tableDesc = TableDescriptorBuilder.newBuilder(tableName)
    .setColumnFamily(ColumnFamilyDescriptorBuilder.of("f")).build();
  // Pre-split into 6 regions across the aaaaa..zzzzz key range.
  ADMIN.createTable(tableDesc, Bytes.toBytes("aaaaa"), Bytes.toBytes("zzzzz"), 6);
  // Wait until all six regions of the table are reported online.
  TEST_UTIL.waitFor(WAIT_TIMEOUT, (Waiter.Predicate<Exception>) () -> {
    List<String> onlineRegions = getTableRegionMap().get(tableName);
    return onlineRegions != null && onlineRegions.size() >= 6;
  });
  // Make the assignment uneven: pile every region onto one arbitrary server.
  Map<ServerName, List<String>> assignments = getTableServerRegionMap().get(tableName);
  final ServerName target = assignments.entrySet().iterator().next().getKey();
  List<String> alreadyHosted = assignments.get(target);
  for (RegionInfo region : ADMIN.getRegions(tableName)) {
    if (!alreadyHosted.contains(region.getRegionNameAsString())) {
      ADMIN.move(region.getEncodedNameAsBytes(), target);
    }
  }
  // Wait until the chosen server hosts all six regions; a missing map or entry is treated
  // as success, matching the original predicate's semantics.
  TEST_UTIL.waitFor(WAIT_TIMEOUT, (Waiter.Predicate<Exception>) () -> {
    Map<ServerName, List<String>> current = getTableServerRegionMap().get(tableName);
    if (current == null) {
      return true;
    }
    List<String> hosted = current.get(target);
    return hosted == null || hosted.size() >= 6;
  });
  return target;
}
Use of org.apache.hadoop.hbase.client.TableDescriptor in project hbase by apache.
From the class TestThriftHBaseServiceHandler, method testMetricsPrecision.
/**
 * See HBASE-17611.
 *
 * <p>Latency metrics were capped at ~2 seconds because an int variable captured the duration.
 * This test issues a get against a table whose coprocessor injects delay and then checks that
 * the recorded max latency exceeds 3 seconds, which requires the wider duration type.
 */
@Test
public void testMetricsPrecision() throws Exception {
  byte[] row = Bytes.toBytes("row1");
  byte[] family = Bytes.toBytes("f");
  byte[] qualifier = Bytes.toBytes("c");
  // Table whose coprocessor delays each request.
  TableName tableName = TableName.valueOf("testMetricsPrecision");
  TableDescriptor desc = TableDescriptorBuilder.newBuilder(tableName)
    .setCoprocessor(DelayingRegionObserver.class.getName())
    .setColumnFamily(ColumnFamilyDescriptorBuilder.of(family)).build();
  Table table = null;
  try {
    table = UTIL.createTable(desc, null);
    table.put(new Put(row).addColumn(family, qualifier, Bytes.toBytes("val1")));
    ThriftHBaseServiceHandler rawHandler = createHandler();
    ThriftMetrics metrics = getMetrics(UTIL.getConfiguration());
    THBaseService.Iface handler = HbaseHandlerMetricsProxy.newInstance(rawHandler, metrics, null);
    ByteBuffer thriftTableName = wrap(tableName.getName());
    // One successful get through the metrics proxy; verify the returned cell is intact.
    TResult result = handler.get(thriftTableName, new TGet(wrap(row)));
    assertArrayEquals(row, result.getRow());
    List<TColumnValue> expected = Lists.newArrayList(
      new TColumnValue(wrap(family), wrap(qualifier), wrap(Bytes.toBytes("val1"))));
    assertTColumnValuesEqual(expected, result.getColumnValues());
    // The delayed get must push max latency past 3000 ms — impossible with an int duration.
    metricsHelper.assertGaugeGt("get_max", 3000L, metrics.getSource());
  } finally {
    if (table != null) {
      try {
        table.close();
      } catch (IOException ignored) {
        // Best-effort close; the table is deleted immediately afterwards anyway.
      }
      UTIL.deleteTable(tableName);
    }
  }
}
Use of org.apache.hadoop.hbase.client.TableDescriptor in project hbase by apache.
From the class TestThriftHBaseServiceHandler, method beforeClass.
/**
 * Starts a mini cluster with authorization enabled (AccessController installed on master,
 * region, and region server) plus slow-log capture, then creates the shared test table.
 *
 * @throws Exception if the mini cluster fails to start or table creation fails
 */
@BeforeClass
public static void beforeClass() throws Exception {
  // Keep client retries low so failing operations surface quickly in tests.
  // (The original code set this property twice with the same value; the duplicate
  // assignment has been removed.)
  UTIL.getConfiguration().set("hbase.client.retries.number", "3");
  UTIL.getConfiguration().setBoolean("hbase.regionserver.slowlog.buffer.enabled", true);
  UTIL.getConfiguration().setBoolean("hbase.security.authorization", true);
  UTIL.getConfiguration().set("hbase.coprocessor.master.classes",
    "org.apache.hadoop.hbase.security.access.AccessController");
  UTIL.getConfiguration().set("hbase.coprocessor.region.classes",
    "org.apache.hadoop.hbase.security.access.AccessController");
  UTIL.getConfiguration().set("hbase.coprocessor.regionserver.classes",
    "org.apache.hadoop.hbase.security.access.AccessController");
  // as we opened access control, we need to start as a superuser. Otherwise, we will not have
  // sufficient permission to do operations.
  UTIL.getConfiguration().set("hbase.superuser", System.getProperty("user.name"));
  UTIL.startMiniCluster();
  TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(TableName.valueOf(tableAname))
    .setColumnFamilies(Arrays.asList(families)).build();
  try (Admin admin = UTIL.getAdmin()) {
    admin.createTable(tableDescriptor);
  }
}
Use of org.apache.hadoop.hbase.client.TableDescriptor in project hbase by apache.
From the class TestThriftHBaseServiceHandler, method testMetricsWithException.
/**
 * Verifies that the get-op counter increments for a successful get, then drives every
 * {@code ErrorThrowingGetObserver.ErrorType} through the handler so that the exception
 * paths are exercised and counted as well.
 */
@Test
public void testMetricsWithException() throws Exception {
  byte[] row = Bytes.toBytes("row1");
  byte[] family = Bytes.toBytes("f");
  byte[] qualifier = Bytes.toBytes("c");
  // Table whose coprocessor can be instructed to throw on get requests.
  TableName tableName = TableName.valueOf(name.getMethodName());
  TableDescriptor desc = TableDescriptorBuilder.newBuilder(tableName)
    .setCoprocessor(ErrorThrowingGetObserver.class.getName())
    .setColumnFamily(ColumnFamilyDescriptorBuilder.of(family)).build();
  Table table = UTIL.createTable(desc, null);
  table.put(new Put(row).addColumn(family, qualifier, Bytes.toBytes("val1")));
  ThriftHBaseServiceHandler rawHandler = createHandler();
  ThriftMetrics metrics = getMetrics(UTIL.getConfiguration());
  THBaseService.Iface handler = HbaseHandlerMetricsProxy.newInstance(rawHandler, metrics, null);
  ByteBuffer thriftTableName = wrap(tableName.getName());
  // Snapshot the op counter (zero if it has never been emitted) before the successful get.
  long baseline = metricsHelper.checkCounterExists("get_num_ops", metrics.getSource())
    ? metricsHelper.getCounter("get_num_ops", metrics.getSource()) : 0;
  TResult result = handler.get(thriftTableName, new TGet(wrap(row)));
  assertArrayEquals(row, result.getRow());
  List<TColumnValue> expected = Lists.newArrayList(
    new TColumnValue(wrap(family), wrap(qualifier), wrap(Bytes.toBytes("val1"))));
  assertTColumnValuesEqual(expected, result.getColumnValues());
  metricsHelper.assertCounter("get_num_ops", baseline + 1, metrics.getSource());
  // Now have the observer throw each known error type and verify metrics still advance.
  for (ErrorThrowingGetObserver.ErrorType errorType
      : ErrorThrowingGetObserver.ErrorType.values()) {
    testExceptionType(handler, metrics, thriftTableName, row, errorType);
  }
}
Use of org.apache.hadoop.hbase.client.TableDescriptor in project hbase by apache.
From the class TestThriftHBaseServiceHandler, method testGetTableDescriptor.
/**
 * Round-trips the shared table's descriptor through the Thrift handler and verifies that
 * the converted descriptor still reports the expected table name, column-family count,
 * and per-family max-versions settings.
 */
@Test
public void testGetTableDescriptor() throws Exception {
  ThriftHBaseServiceHandler handler = createHandler();
  TTableDescriptor thriftDescriptor =
    handler.getTableDescriptor(ThriftUtilities.tableNameFromHBase(TableName.valueOf(tableAname)));
  // Convert back to an HBase descriptor and check the round trip preserved the schema.
  TableDescriptor descriptor = ThriftUtilities.tableDescriptorFromThrift(thriftDescriptor);
  assertTrue(TableName.valueOf(tableAname).equals(descriptor.getTableName()));
  assertTrue(descriptor.getColumnFamilies().length == 2);
  assertTrue(descriptor.getColumnFamily(familyAname).getMaxVersions() == 3);
  assertTrue(descriptor.getColumnFamily(familyBname).getMaxVersions() == 2);
}
Aggregations