Example 11 with TableDescriptor

Use of org.apache.hadoop.hbase.client.TableDescriptor in the Apache HBase project.

From class TestBackupBase, method createTables:

protected static void createTables() throws Exception {
    long tid = EnvironmentEdgeManager.currentTime();
    table1 = TableName.valueOf("test-" + tid);
    Admin ha = TEST_UTIL.getAdmin();
    // Create namespaces
    ha.createNamespace(NamespaceDescriptor.create("ns1").build());
    ha.createNamespace(NamespaceDescriptor.create("ns2").build());
    ha.createNamespace(NamespaceDescriptor.create("ns3").build());
    ha.createNamespace(NamespaceDescriptor.create("ns4").build());
    TableDescriptor desc = TableDescriptorBuilder.newBuilder(table1).setColumnFamily(ColumnFamilyDescriptorBuilder.of(famName)).build();
    ha.createTable(desc);
    table1Desc = desc;
    Connection conn = ConnectionFactory.createConnection(conf1);
    Table table = conn.getTable(table1);
    loadTable(table);
    table.close();
    table2 = TableName.valueOf("ns2:test-" + tid + 1);
    desc = TableDescriptorBuilder.newBuilder(table2).setColumnFamily(ColumnFamilyDescriptorBuilder.of(famName)).build();
    ha.createTable(desc);
    table = conn.getTable(table2);
    loadTable(table);
    table.close();
    table3 = TableName.valueOf("ns3:test-" + tid + 2);
    table = TEST_UTIL.createTable(table3, famName);
    table.close();
    table4 = TableName.valueOf("ns4:test-" + tid + 3);
    table = TEST_UTIL.createTable(table4, famName);
    table.close();
    ha.close();
    conn.close();
}
Also used : BackupSystemTable(org.apache.hadoop.hbase.backup.impl.BackupSystemTable) Table(org.apache.hadoop.hbase.client.Table) Connection(org.apache.hadoop.hbase.client.Connection) Admin(org.apache.hadoop.hbase.client.Admin) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor)
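
For context, here is a minimal self-contained sketch of the same builder pattern outside the test harness: TableDescriptorBuilder assembles an immutable TableDescriptor from one or more ColumnFamilyDescriptors, which Admin.createTable() then materializes. The table name "demo", the family name "f", and the max-versions setting below are illustrative choices, not taken from the example above.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;

public class CreateTableSketch {
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        try (Connection conn = ConnectionFactory.createConnection(conf);
             Admin admin = conn.getAdmin()) {
            // Assemble an immutable TableDescriptor: table name plus one column family.
            TableDescriptor desc = TableDescriptorBuilder.newBuilder(TableName.valueOf("demo"))
                .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("f"))
                    // Keep up to three versions of each cell (illustrative setting).
                    .setMaxVersions(3)
                    .build())
                .build();
            if (!admin.tableExists(desc.getTableName())) {
                admin.createTable(desc);
            }
        }
    }
}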

Example 12 with TableDescriptor

Use of org.apache.hadoop.hbase.client.TableDescriptor in the Apache HBase project.

From class MasterRpcServices, method getRSGroupInfoOfTable:

@Override
public GetRSGroupInfoOfTableResponse getRSGroupInfoOfTable(RpcController controller, GetRSGroupInfoOfTableRequest request) throws ServiceException {
    TableName tableName = ProtobufUtil.toTableName(request.getTableName());
    LOG.info(server.getClientIdAuditPrefix() + " initiates rsgroup info retrieval, table=" + tableName);
    try {
        if (server.getMasterCoprocessorHost() != null) {
            server.getMasterCoprocessorHost().preGetRSGroupInfoOfTable(tableName);
        }
        GetRSGroupInfoOfTableResponse resp;
        TableDescriptor td = server.getTableDescriptors().get(tableName);
        if (td == null) {
            resp = GetRSGroupInfoOfTableResponse.getDefaultInstance();
        } else {
            RSGroupInfo rsGroupInfo = RSGroupUtil.getRSGroupInfo(server, server.getRSGroupInfoManager(), tableName).orElse(server.getRSGroupInfoManager().getRSGroup(RSGroupInfo.DEFAULT_GROUP));
            resp = GetRSGroupInfoOfTableResponse.newBuilder().setRSGroupInfo(ProtobufUtil.toProtoGroupInfo(rsGroupInfo)).build();
        }
        if (server.getMasterCoprocessorHost() != null) {
            server.getMasterCoprocessorHost().postGetRSGroupInfoOfTable(tableName);
        }
        return resp;
    } catch (IOException e) {
        throw new ServiceException(e);
    }
}
Also used : TableName(org.apache.hadoop.hbase.TableName) GetRSGroupInfoOfTableResponse(org.apache.hadoop.hbase.shaded.protobuf.generated.RSGroupAdminProtos.GetRSGroupInfoOfTableResponse) ServiceException(org.apache.hbase.thirdparty.com.google.protobuf.ServiceException) RSGroupInfo(org.apache.hadoop.hbase.rsgroup.RSGroupInfo) IOException(java.io.IOException) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor)
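
The example above resolves the descriptor on the master side via server.getTableDescriptors(). From client code, the comparable lookup is Admin.getDescriptor(TableName), which returns the table's TableDescriptor or throws TableNotFoundException. A short sketch, where the table name "demo" is a hypothetical placeholder:

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.TableDescriptor;

public class DescribeTableSketch {
    public static void main(String[] args) throws Exception {
        try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
             Admin admin = conn.getAdmin()) {
            try {
                // Fetch the descriptor for one table and print its column families.
                TableDescriptor td = admin.getDescriptor(TableName.valueOf("demo"));
                for (ColumnFamilyDescriptor cfd : td.getColumnFamilies()) {
                    System.out.println(td.getTableName() + " family: " + cfd.getNameAsString());
                }
            } catch (TableNotFoundException e) {
                System.out.println("Table does not exist");
            }
        }
    }
}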

Example 13 with TableDescriptor

Use of org.apache.hadoop.hbase.client.TableDescriptor in the Apache HBase project.

From class AssignmentManager, method markRegionAsMerged:

/**
 * When called here, the merge has happened. The merged regions have been
 * unassigned and the above markRegionClosed has been called on each so they have been
 * disassociated from a hosting Server. The merged region will be open after this call. The
 * merged regions are removed from hbase:meta below. Later they are deleted from the filesystem
 * by the catalog janitor running against hbase:meta. It notices when the merged region no
 * longer holds references to the old regions (References are deleted after a compaction
 * rewrites what a Reference points at, but the References themselves are not removed
 * until the archiver chore runs).
 */
public void markRegionAsMerged(final RegionInfo child, final ServerName serverName, RegionInfo[] mergeParents) throws IOException {
    final RegionStateNode node = regionStates.getOrCreateRegionStateNode(child);
    node.setState(State.MERGED);
    for (RegionInfo ri : mergeParents) {
        regionStates.deleteRegion(ri);
    }
    TableDescriptor td = master.getTableDescriptors().get(child.getTable());
    regionStateStore.mergeRegions(child, mergeParents, serverName, td);
    if (shouldAssignFavoredNodes(child)) {
        getFavoredNodePromoter().generateFavoredNodesForMergedRegion(child, mergeParents);
    }
}
Also used : RegionInfo(org.apache.hadoop.hbase.client.RegionInfo) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor)
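
markRegionAsMerged is internal master bookkeeping that runs after a merge; the merge itself is normally requested through the public Admin API. A rough client-side sketch under the assumption of a table named "demo" with at least two adjacent regions (the table name and the choice of regions are illustrative, and this is not the AssignmentManager code path shown above):

import java.util.List;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.RegionInfo;

public class MergeRegionsSketch {
    public static void main(String[] args) throws Exception {
        try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
             Admin admin = conn.getAdmin()) {
            List<RegionInfo> regions = admin.getRegions(TableName.valueOf("demo"));
            if (regions.size() >= 2) {
                // Ask the master to merge the first two regions and block until the
                // merge procedure completes.
                admin.mergeRegionsAsync(new byte[][] {
                    regions.get(0).getEncodedNameAsBytes(),
                    regions.get(1).getEncodedNameAsBytes()
                }, false).get();
            }
        }
    }
}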

Example 14 with TableDescriptor

Use of org.apache.hadoop.hbase.client.TableDescriptor in the Apache HBase project.

From class MasterRpcServices, method getTableDescriptors:

/**
 * Get list of TableDescriptors for requested tables.
 * @param c Unused (set to null).
 * @param req GetTableDescriptorsRequest that contains:
 *     - tableNames: requested tables, or if empty, all are requested.
 * @return GetTableDescriptorsResponse
 * @throws ServiceException
 */
@Override
public GetTableDescriptorsResponse getTableDescriptors(RpcController c, GetTableDescriptorsRequest req) throws ServiceException {
    try {
        server.checkInitialized();
        final String regex = req.hasRegex() ? req.getRegex() : null;
        final String namespace = req.hasNamespace() ? req.getNamespace() : null;
        List<TableName> tableNameList = null;
        if (req.getTableNamesCount() > 0) {
            tableNameList = new ArrayList<TableName>(req.getTableNamesCount());
            for (HBaseProtos.TableName tableNamePB : req.getTableNamesList()) {
                tableNameList.add(ProtobufUtil.toTableName(tableNamePB));
            }
        }
        List<TableDescriptor> descriptors = server.listTableDescriptors(namespace, regex, tableNameList, req.getIncludeSysTables());
        GetTableDescriptorsResponse.Builder builder = GetTableDescriptorsResponse.newBuilder();
        if (descriptors != null && descriptors.size() > 0) {
            // Add the table descriptors to the response
            for (TableDescriptor htd : descriptors) {
                builder.addTableSchema(ProtobufUtil.toTableSchema(htd));
            }
        }
        return builder.build();
    } catch (IOException ioe) {
        throw new ServiceException(ioe);
    }
}
Also used : TableName(org.apache.hadoop.hbase.TableName) ServiceException(org.apache.hbase.thirdparty.com.google.protobuf.ServiceException) ByteString(org.apache.hbase.thirdparty.com.google.protobuf.ByteString) IOException(java.io.IOException) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) GetTableDescriptorsResponse(org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.GetTableDescriptorsResponse) HBaseProtos(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor)
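
On the client side this RPC is typically reached through Admin.listTableDescriptors(). A brief sketch, where the "test-.*" regex is just an illustrative filter:

import java.util.List;
import java.util.regex.Pattern;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.TableDescriptor;

public class ListTablesSketch {
    public static void main(String[] args) throws Exception {
        try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
             Admin admin = conn.getAdmin()) {
            // All user tables (system tables are excluded by this overload).
            List<TableDescriptor> all = admin.listTableDescriptors();
            // Tables whose names match a pattern; 'false' excludes system tables.
            List<TableDescriptor> matching =
                admin.listTableDescriptors(Pattern.compile("test-.*"), false);
            System.out.println(all.size() + " tables, " + matching.size() + " matching");
        }
    }
}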

Example 15 with TableDescriptor

Use of org.apache.hadoop.hbase.client.TableDescriptor in the Apache HBase project.

From class MetricsMasterWrapperImpl, method getRegionCounts:

@Override
public PairOfSameType<Integer> getRegionCounts() {
    try {
        if (!master.isInitialized()) {
            return new PairOfSameType<>(0, 0);
        }
        Integer onlineRegionCount = 0;
        Integer offlineRegionCount = 0;
        List<TableDescriptor> descriptors = master.listTableDescriptors(null, null, null, false);
        for (TableDescriptor htDesc : descriptors) {
            TableName tableName = htDesc.getTableName();
            Map<RegionState.State, List<RegionInfo>> tableRegions = master.getAssignmentManager().getRegionStates().getRegionByStateOfTable(tableName);
            onlineRegionCount += tableRegions.get(RegionState.State.OPEN).size();
            offlineRegionCount += tableRegions.get(RegionState.State.OFFLINE).size();
        }
        return new PairOfSameType<>(onlineRegionCount, offlineRegionCount);
    } catch (IOException e) {
        return new PairOfSameType<>(0, 0);
    }
}
Also used : TableName(org.apache.hadoop.hbase.TableName) List(java.util.List) IOException(java.io.IOException) PairOfSameType(org.apache.hadoop.hbase.util.PairOfSameType) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor)
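
A client cannot read the master's internal RegionStates the way this wrapper does, but a rough per-table region count can be assembled from the public Admin API. A sketch under that assumption (it counts the regions known to the master without breaking them out by OPEN/OFFLINE state as the example above does):

import java.util.List;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.TableDescriptor;

public class RegionCountSketch {
    public static void main(String[] args) throws Exception {
        try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
             Admin admin = conn.getAdmin()) {
            for (TableDescriptor td : admin.listTableDescriptors()) {
                // getRegions returns the regions the master currently knows for the table.
                List<RegionInfo> regions = admin.getRegions(td.getTableName());
                System.out.println(td.getTableName() + ": " + regions.size() + " regions");
            }
        }
    }
}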

Aggregations

TableDescriptor (org.apache.hadoop.hbase.client.TableDescriptor): 639
Test (org.junit.Test): 356
TableName (org.apache.hadoop.hbase.TableName): 237
RegionInfo (org.apache.hadoop.hbase.client.RegionInfo): 180
IOException (java.io.IOException): 151
Put (org.apache.hadoop.hbase.client.Put): 142
Admin (org.apache.hadoop.hbase.client.Admin): 136
Path (org.apache.hadoop.fs.Path): 124
Table (org.apache.hadoop.hbase.client.Table): 121
ColumnFamilyDescriptor (org.apache.hadoop.hbase.client.ColumnFamilyDescriptor): 96
Configuration (org.apache.hadoop.conf.Configuration): 91
TableDescriptorBuilder (org.apache.hadoop.hbase.client.TableDescriptorBuilder): 77
ArrayList (java.util.ArrayList): 75
FileSystem (org.apache.hadoop.fs.FileSystem): 66
Result (org.apache.hadoop.hbase.client.Result): 66
HRegion (org.apache.hadoop.hbase.regionserver.HRegion): 64
Connection (org.apache.hadoop.hbase.client.Connection): 59
Scan (org.apache.hadoop.hbase.client.Scan): 50
Get (org.apache.hadoop.hbase.client.Get): 49
List (java.util.List): 39