
Example 6 with HTableDescriptor

Use of org.apache.hadoop.hbase.HTableDescriptor in project hbase by apache, from class TestRSGroups, method testDefaultNamespaceCreateAndAssign.

@Test
public void testDefaultNamespaceCreateAndAssign() throws Exception {
    LOG.info("testDefaultNamespaceCreateAndAssign");
    final byte[] tableName = Bytes.toBytes(tablePrefix + "_testCreateAndAssign");
    admin.modifyNamespace(NamespaceDescriptor.create("default").addConfiguration(RSGroupInfo.NAMESPACE_DESC_PROP_GROUP, "default").build());
    final HTableDescriptor desc = new HTableDescriptor(tableName);
    desc.addFamily(new HColumnDescriptor("f"));
    admin.createTable(desc);
    //wait for created table to be assigned
    TEST_UTIL.waitFor(WAIT_TIMEOUT, new Waiter.Predicate<Exception>() {

        @Override
        public boolean evaluate() throws Exception {
            return getTableRegionMap().get(desc.getTableName()) != null;
        }
    });
}
Also used: HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor), Waiter (org.apache.hadoop.hbase.Waiter), IOException (java.io.IOException), HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor), Test (org.junit.Test)
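
The anonymous Waiter.Predicate in this test has a single abstract method (evaluate), so on Java 8 and later the same wait can be written as a lambda. A minimal sketch, assuming the surrounding test context (TEST_UTIL, WAIT_TIMEOUT, getTableRegionMap and desc) from the example above:

// Same wait expressed as a lambda; the cast pins the checked-exception
// type parameter of Waiter.Predicate so evaluate() may throw.
TEST_UTIL.waitFor(WAIT_TIMEOUT, (Waiter.Predicate<Exception>) () ->
    getTableRegionMap().get(desc.getTableName()) != null);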

Example 7 with HTableDescriptor

Use of org.apache.hadoop.hbase.HTableDescriptor in project hbase by apache, from class TestRSGroups, method testNamespaceCreateAndAssign.

@Test
public void testNamespaceCreateAndAssign() throws Exception {
    LOG.info("testNamespaceCreateAndAssign");
    String nsName = tablePrefix + "_foo";
    final TableName tableName = TableName.valueOf(nsName, tablePrefix + "_testCreateAndAssign");
    RSGroupInfo appInfo = addGroup("appInfo", 1);
    admin.createNamespace(NamespaceDescriptor.create(nsName).addConfiguration(RSGroupInfo.NAMESPACE_DESC_PROP_GROUP, "appInfo").build());
    final HTableDescriptor desc = new HTableDescriptor(tableName);
    desc.addFamily(new HColumnDescriptor("f"));
    admin.createTable(desc);
    //wait for created table to be assigned
    TEST_UTIL.waitFor(WAIT_TIMEOUT, new Waiter.Predicate<Exception>() {

        @Override
        public boolean evaluate() throws Exception {
            return getTableRegionMap().get(desc.getTableName()) != null;
        }
    });
    ServerName targetServer = ServerName.parseServerName(appInfo.getServers().iterator().next().toString());
    AdminProtos.AdminService.BlockingInterface rs = ((ClusterConnection) admin.getConnection()).getAdmin(targetServer);
    //verify it was assigned to the right group
    Assert.assertEquals(1, ProtobufUtil.getOnlineRegions(rs).size());
}
Also used: HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor), IOException (java.io.IOException), HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor), TableName (org.apache.hadoop.hbase.TableName), ClusterConnection (org.apache.hadoop.hbase.client.ClusterConnection), ServerName (org.apache.hadoop.hbase.ServerName), Waiter (org.apache.hadoop.hbase.Waiter), Test (org.junit.Test)
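
HTableDescriptor and HColumnDescriptor were deprecated in HBase 2.0 in favor of builder-based descriptors. A hedged sketch of the equivalent table creation with the builder API, assuming an HBase 2.x client; the table name "myTable" is an illustrative placeholder and exact builder method names vary slightly between 2.x releases:

// Build an immutable TableDescriptor with one column family "f" and create the table.
TableDescriptor newStyleDesc = TableDescriptorBuilder.newBuilder(TableName.valueOf(nsName, "myTable"))
    .setColumnFamily(ColumnFamilyDescriptorBuilder.of("f"))
    .build();
admin.createTable(newStyleDesc);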

Example 8 with HTableDescriptor

Use of org.apache.hadoop.hbase.HTableDescriptor in project hbase by apache, from class TestRSGroupsBase, method testKillRS.

@Test
public void testKillRS() throws Exception {
    RSGroupInfo appInfo = addGroup("appInfo", 1);
    final TableName tableName = TableName.valueOf(tablePrefix + "_ns", name.getMethodName());
    admin.createNamespace(NamespaceDescriptor.create(tableName.getNamespaceAsString()).addConfiguration(RSGroupInfo.NAMESPACE_DESC_PROP_GROUP, appInfo.getName()).build());
    final HTableDescriptor desc = new HTableDescriptor(tableName);
    desc.addFamily(new HColumnDescriptor("f"));
    admin.createTable(desc);
    //wait for created table to be assigned
    TEST_UTIL.waitFor(WAIT_TIMEOUT, new Waiter.Predicate<Exception>() {

        @Override
        public boolean evaluate() throws Exception {
            return getTableRegionMap().get(desc.getTableName()) != null;
        }
    });
    ServerName targetServer = ServerName.parseServerName(appInfo.getServers().iterator().next().toString());
    AdminProtos.AdminService.BlockingInterface targetRS = ((ClusterConnection) admin.getConnection()).getAdmin(targetServer);
    HRegionInfo targetRegion = ProtobufUtil.getOnlineRegions(targetRS).get(0);
    Assert.assertEquals(1, ProtobufUtil.getOnlineRegions(targetRS).size());
    try {
        // stopping the server may throw an exception due to the connection loss
        targetRS.stopServer(null, AdminProtos.StopServerRequest.newBuilder().setReason("Die").build());
    } catch (Exception e) {
        // expected: the connection to the dying region server is dropped
    }
    assertFalse(cluster.getClusterStatus().getServers().contains(targetServer));
    //wait for regions in transition to clear after the server is killed
    TEST_UTIL.waitFor(WAIT_TIMEOUT, new Waiter.Predicate<Exception>() {

        @Override
        public boolean evaluate() throws Exception {
            return cluster.getClusterStatus().getRegionsInTransition().isEmpty();
        }
    });
    Set<Address> newServers = Sets.newHashSet();
    newServers.add(rsGroupAdmin.getRSGroupInfo(RSGroupInfo.DEFAULT_GROUP).getServers().iterator().next());
    rsGroupAdmin.moveServers(newServers, appInfo.getName());
    //Make sure all the table's regions get reassigned
    //disabling the table guarantees no conflicting assign/unassign (ie SSH) happens
    admin.disableTable(tableName);
    admin.enableTable(tableName);
    //wait for region to be assigned
    TEST_UTIL.waitFor(WAIT_TIMEOUT, new Waiter.Predicate<Exception>() {

        @Override
        public boolean evaluate() throws Exception {
            return cluster.getClusterStatus().getRegionsInTransition().isEmpty();
        }
    });
    targetServer = ServerName.parseServerName(newServers.iterator().next().toString());
    targetRS = ((ClusterConnection) admin.getConnection()).getAdmin(targetServer);
    Assert.assertEquals(1, ProtobufUtil.getOnlineRegions(targetRS).size());
    Assert.assertEquals(tableName, ProtobufUtil.getOnlineRegions(targetRS).get(0).getTable());
}
Also used: Address (org.apache.hadoop.hbase.net.Address), HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor), IOException (java.io.IOException), ConstraintException (org.apache.hadoop.hbase.constraint.ConstraintException), HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor), HRegionInfo (org.apache.hadoop.hbase.HRegionInfo), TableName (org.apache.hadoop.hbase.TableName), ClusterConnection (org.apache.hadoop.hbase.client.ClusterConnection), ServerName (org.apache.hadoop.hbase.ServerName), Waiter (org.apache.hadoop.hbase.Waiter), Test (org.junit.Test)
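
Outside the test harness, the group-management steps that testKillRS exercises map onto the RSGroupAdmin API roughly as follows. This is only a sketch; the group name, server address, and table are illustrative placeholders and error handling is omitted:

// Create a dedicated group, move one region server into it, then pin a table to it.
rsGroupAdmin.addRSGroup("appInfo");
rsGroupAdmin.moveServers(Sets.newHashSet(Address.fromString("rs-host.example.com:16020")), "appInfo");
rsGroupAdmin.moveTables(Sets.newHashSet(tableName), "appInfo");
// Confirm the table is now tracked by the target group.
RSGroupInfo info = rsGroupAdmin.getRSGroupInfo("appInfo");
Assert.assertTrue(info.getTables().contains(tableName));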

Example 9 with HTableDescriptor

Use of org.apache.hadoop.hbase.HTableDescriptor in project hbase by apache, from class MasterRpcServices, method getTableDescriptors.

/**
   * Get list of TableDescriptors for requested tables.
   * @param c Unused (set to null).
   * @param req GetTableDescriptorsRequest that contains:
   * - tableNames: requested tables, or if empty, all are requested
   * @return GetTableDescriptorsResponse
   * @throws ServiceException
   */
@Override
public GetTableDescriptorsResponse getTableDescriptors(RpcController c, GetTableDescriptorsRequest req) throws ServiceException {
    try {
        master.checkInitialized();
        final String regex = req.hasRegex() ? req.getRegex() : null;
        final String namespace = req.hasNamespace() ? req.getNamespace() : null;
        List<TableName> tableNameList = null;
        if (req.getTableNamesCount() > 0) {
            tableNameList = new ArrayList<TableName>(req.getTableNamesCount());
            for (HBaseProtos.TableName tableNamePB : req.getTableNamesList()) {
                tableNameList.add(ProtobufUtil.toTableName(tableNamePB));
            }
        }
        List<HTableDescriptor> descriptors = master.listTableDescriptors(namespace, regex, tableNameList, req.getIncludeSysTables());
        GetTableDescriptorsResponse.Builder builder = GetTableDescriptorsResponse.newBuilder();
        if (descriptors != null && descriptors.size() > 0) {
            // Add the table descriptors to the response
            for (HTableDescriptor htd : descriptors) {
                builder.addTableSchema(ProtobufUtil.convertToTableSchema(htd));
            }
        }
        return builder.build();
    } catch (IOException ioe) {
        throw new ServiceException(ioe);
    }
}
Also used: TableName (org.apache.hadoop.hbase.TableName), ServiceException (org.apache.hadoop.hbase.shaded.com.google.protobuf.ServiceException), IOException (java.io.IOException), DoNotRetryIOException (org.apache.hadoop.hbase.DoNotRetryIOException), GetTableDescriptorsResponse (org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsResponse), HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor)
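
On the client side this RPC is normally reached through the Admin API rather than called directly. A minimal sketch, assuming an open Connection named connection; the table-name pattern is purely illustrative:

// Each returned HTableDescriptor is built from a TableSchema in the
// GetTableDescriptorsResponse assembled by the master above.
try (Admin admin = connection.getAdmin()) {
    for (HTableDescriptor htd : admin.listTables(Pattern.compile("my_prefix.*"))) {
        System.out.println(htd.getTableName());
    }
}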

Example 10 with HTableDescriptor

Use of org.apache.hadoop.hbase.HTableDescriptor in project hbase by apache, from class HMaster, method listTableNames.

/**
   * Returns the list of table names that match the specified request
   * @param regex The regular expression to match against, or null if querying for all
   * @param namespace the namespace to query, or null if querying for all
   * @param includeSysTables False to match only against userspace tables
   * @return the list of table names
   */
public List<TableName> listTableNames(final String namespace, final String regex, final boolean includeSysTables) throws IOException {
    List<HTableDescriptor> htds = new ArrayList<>();
    boolean bypass = cpHost != null ? cpHost.preGetTableNames(htds, regex) : false;
    if (!bypass) {
        htds = getTableDescriptors(htds, namespace, regex, null, includeSysTables);
        if (cpHost != null)
            cpHost.postGetTableNames(htds, regex);
    }
    List<TableName> result = new ArrayList<>(htds.size());
    for (HTableDescriptor htd : htds) result.add(htd.getTableName());
    return result;
}
Also used: TableName (org.apache.hadoop.hbase.TableName), ArrayList (java.util.ArrayList), HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor)
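
The client-side counterpart is Admin.listTableNames, which returns table names only rather than full descriptors. A short sketch, with the pattern again an illustrative placeholder:

// Fetch user-space table names matching the pattern; system tables are
// excluded by passing false for includeSysTables.
TableName[] names = admin.listTableNames(Pattern.compile("my_prefix.*"), false);
for (TableName tn : names) {
    System.out.println(tn.getNameAsString());
}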

Aggregations

HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor): 867
HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor): 555
Test (org.junit.Test): 425
TableName (org.apache.hadoop.hbase.TableName): 258
HRegionInfo (org.apache.hadoop.hbase.HRegionInfo): 171
IOException (java.io.IOException): 167
Put (org.apache.hadoop.hbase.client.Put): 149
Table (org.apache.hadoop.hbase.client.Table): 134
Path (org.apache.hadoop.fs.Path): 127
Admin (org.apache.hadoop.hbase.client.Admin): 121
Configuration (org.apache.hadoop.conf.Configuration): 87
HBaseAdmin (org.apache.hadoop.hbase.client.HBaseAdmin): 77
ArrayList (java.util.ArrayList): 75
FileSystem (org.apache.hadoop.fs.FileSystem): 66
Result (org.apache.hadoop.hbase.client.Result): 62
Connection (org.apache.hadoop.hbase.client.Connection): 57
Scan (org.apache.hadoop.hbase.client.Scan): 51
Cell (org.apache.hadoop.hbase.Cell): 44
Delete (org.apache.hadoop.hbase.client.Delete): 44
HRegion (org.apache.hadoop.hbase.regionserver.HRegion): 43