
Example 1 with HTableDescriptor

use of org.apache.hadoop.hbase.HTableDescriptor in project hbase by apache.

the class AsyncHBaseAdmin method getTableDescriptor.

@Override
public CompletableFuture<HTableDescriptor> getTableDescriptor(TableName tableName) {
    CompletableFuture<HTableDescriptor> future = new CompletableFuture<>();
    // Ask the master for the table schema and complete the future from the response.
    this.<List<TableSchema>>newMasterCaller()
        .action((controller, stub) -> this
            .<GetTableDescriptorsRequest, GetTableDescriptorsResponse, List<TableSchema>>call(
                controller, stub, RequestConverter.buildGetTableDescriptorsRequest(tableName),
                (s, c, req, done) -> s.getTableDescriptors(c, req, done),
                (resp) -> resp.getTableSchemaList()))
        .call()
        .whenComplete((tableSchemas, error) -> {
            if (error != null) {
                future.completeExceptionally(error);
                return;
            }
            if (!tableSchemas.isEmpty()) {
                future.complete(ProtobufUtil.convertToHTableDesc(tableSchemas.get(0)));
            } else {
                future.completeExceptionally(new TableNotFoundException(tableName.getNameAsString()));
            }
        });
    return future;
}
Also used : TableNotFoundException(org.apache.hadoop.hbase.TableNotFoundException) CompletableFuture(java.util.concurrent.CompletableFuture) TableSchema(org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.TableSchema) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor)
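
The asynchronous variant is useful because callers can chain work onto the returned CompletableFuture instead of blocking. A minimal consumption sketch is shown below; the asyncAdmin variable is assumed to be whatever object exposes the getTableDescriptor method above and is not part of the original snippet.

// Hypothetical caller: 'asyncAdmin' is assumed to expose getTableDescriptor as defined above.
void printFamilyCount(TableName tableName) {
    asyncAdmin.getTableDescriptor(tableName).whenComplete((desc, error) -> {
        if (error != null) {
            // A TableNotFoundException (among other failures) arrives through the error path.
            error.printStackTrace();
            return;
        }
        System.out.println(tableName + " has " + desc.getFamilies().size() + " column families");
    });
}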

Example 2 with HTableDescriptor

use of org.apache.hadoop.hbase.HTableDescriptor in project hbase by apache.

the class TestBatchCoprocessorEndpoint method setupBeforeClass.

@BeforeClass
public static void setupBeforeClass() throws Exception {
    // Set configuration to indicate which coprocessors should be loaded.
    Configuration conf = util.getConfiguration();
    conf.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, org.apache.hadoop.hbase.coprocessor.ColumnAggregationEndpoint.class.getName(), ProtobufCoprocessorService.class.getName(), ColumnAggregationEndpointWithErrors.class.getName(), ColumnAggregationEndpointNullResponse.class.getName());
    conf.setStrings(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY, ProtobufCoprocessorService.class.getName());
    util.startMiniCluster(2);
    Admin admin = util.getAdmin();
    HTableDescriptor desc = new HTableDescriptor(TEST_TABLE);
    desc.addFamily(new HColumnDescriptor(TEST_FAMILY));
    admin.createTable(desc, new byte[][] { ROWS[rowSeperator1], ROWS[rowSeperator2] });
    util.waitUntilAllRegionsAssigned(TEST_TABLE);
    admin.close();
    Table table = util.getConnection().getTable(TEST_TABLE);
    for (int i = 0; i < ROWSIZE; i++) {
        Put put = new Put(ROWS[i]);
        put.addColumn(TEST_FAMILY, TEST_QUALIFIER, Bytes.toBytes(i));
        table.put(put);
    }
    table.close();
}
Also used : Table(org.apache.hadoop.hbase.client.Table) Configuration(org.apache.hadoop.conf.Configuration) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) Admin(org.apache.hadoop.hbase.client.Admin) Put(org.apache.hadoop.hbase.client.Put) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) BeforeClass(org.junit.BeforeClass)
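
Since the setup writes ROWSIZE rows with the row index as the cell value, a test can read one back to confirm the table is ready. A minimal read-back sketch using the same constants (TEST_TABLE, TEST_FAMILY, TEST_QUALIFIER, ROWS) is below; the assertion is illustrative and not part of the original test.

// Read back the first row written by setupBeforeClass (illustrative check).
try (Table table = util.getConnection().getTable(TEST_TABLE)) {
    Result result = table.get(new Get(ROWS[0]));
    int stored = Bytes.toInt(result.getValue(TEST_FAMILY, TEST_QUALIFIER));
    // Row i is written with value i, so the first row should hold 0.
    assertEquals(0, stored);
}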

Example 3 with HTableDescriptor

use of org.apache.hadoop.hbase.HTableDescriptor in project hbase by apache.

the class TestCoprocessorEndpoint method setupBeforeClass.

@BeforeClass
public static void setupBeforeClass() throws Exception {
    // Set configuration to indicate which coprocessors should be loaded.
    Configuration conf = util.getConfiguration();
    conf.setInt(HConstants.HBASE_CLIENT_OPERATION_TIMEOUT, 5000);
    conf.setStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY, org.apache.hadoop.hbase.coprocessor.ColumnAggregationEndpoint.class.getName(), ProtobufCoprocessorService.class.getName());
    conf.setStrings(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY, ProtobufCoprocessorService.class.getName());
    util.startMiniCluster(2);
    Admin admin = util.getAdmin();
    HTableDescriptor desc = new HTableDescriptor(TEST_TABLE);
    desc.addFamily(new HColumnDescriptor(TEST_FAMILY));
    admin.createTable(desc, new byte[][] { ROWS[rowSeperator1], ROWS[rowSeperator2] });
    util.waitUntilAllRegionsAssigned(TEST_TABLE);
    Table table = util.getConnection().getTable(TEST_TABLE);
    for (int i = 0; i < ROWSIZE; i++) {
        Put put = new Put(ROWS[i]);
        put.addColumn(TEST_FAMILY, TEST_QUALIFIER, Bytes.toBytes(i));
        table.put(put);
    }
    table.close();
}
Also used : Table(org.apache.hadoop.hbase.client.Table) Configuration(org.apache.hadoop.conf.Configuration) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) Admin(org.apache.hadoop.hbase.client.Admin) Put(org.apache.hadoop.hbase.client.Put) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) BeforeClass(org.junit.BeforeClass)
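
HTableDescriptor and HColumnDescriptor were later deprecated in favor of builder-style descriptors. For comparison only (this is not part of the original test, and assumes an HBase 2.x client on the classpath), the same table creation could be sketched as:

// Builder-based equivalent of the descriptor setup above (HBase 2.x API, sketch only).
TableDescriptor desc = TableDescriptorBuilder.newBuilder(TEST_TABLE)
    .setColumnFamily(ColumnFamilyDescriptorBuilder.of(TEST_FAMILY))
    .build();
admin.createTable(desc, new byte[][] { ROWS[rowSeperator1], ROWS[rowSeperator2] });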

Example 4 with HTableDescriptor

use of org.apache.hadoop.hbase.HTableDescriptor in project hbase by apache.

the class BackupManager method createBackupInfo.

/**
   * Creates a backup info based on the input backup request.
   * @param backupId backup id
   * @param type backup type
   * @param tableList list of tables to back up; may be null or empty for a full backup of all user tables
   * @param targetRootDir root directory of the backup destination
   * @param workers number of parallel workers
   * @param bandwidth bandwidth per worker in MB per second
   * @return the created BackupInfo
   * @throws BackupException if the request is invalid or the tables cannot be listed
   */
public BackupInfo createBackupInfo(String backupId, BackupType type, List<TableName> tableList, String targetRootDir, int workers, long bandwidth) throws BackupException {
    if (targetRootDir == null) {
        throw new BackupException("Wrong backup request parameter: target backup root directory");
    }
    if (type == BackupType.FULL && (tableList == null || tableList.isEmpty())) {
        // If the table list is null or empty for a full backup, back up all tables: fill the
        // list with all user tables from meta. If no table is available, throw an exception.
        HTableDescriptor[] htds = null;
        try (Admin admin = conn.getAdmin()) {
            htds = admin.listTables();
        } catch (Exception e) {
            throw new BackupException(e);
        }
        if (htds == null) {
            throw new BackupException("No table exists for full backup of all tables.");
        } else {
            tableList = new ArrayList<>();
            for (HTableDescriptor hTableDescriptor : htds) {
                TableName tn = hTableDescriptor.getTableName();
                if (tn.equals(BackupSystemTable.getTableName(conf))) {
                    // skip backup system table
                    continue;
                }
                tableList.add(hTableDescriptor.getTableName());
            }
            LOG.info("Full backup all the tables available in the cluster: " + tableList);
        }
    }
    // there are one or more tables in the table list
    backupInfo = new BackupInfo(backupId, type, tableList.toArray(new TableName[tableList.size()]), targetRootDir);
    backupInfo.setBandwidth(bandwidth);
    backupInfo.setWorkers(workers);
    return backupInfo;
}
Also used : BackupInfo(org.apache.hadoop.hbase.backup.BackupInfo) TableName(org.apache.hadoop.hbase.TableName) Admin(org.apache.hadoop.hbase.client.Admin) IOException(java.io.IOException) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor)
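
A hedged sketch of how a caller might request a full backup of all user tables through this method; the backup id format, root directory, worker count, and bandwidth below are placeholders, and backupManager stands in for an already-constructed BackupManager.

// Hypothetical invocation: a null table list with BackupType.FULL means "all user tables".
BackupInfo info = backupManager.createBackupInfo(
    "backup_" + System.currentTimeMillis(), // backupId (placeholder naming scheme)
    BackupType.FULL,
    null,                                   // null/empty list => expand to all user tables
    "hdfs://namenode:8020/backup",          // targetRootDir (placeholder path)
    3,                                      // workers
    100L);                                  // bandwidth per worker, MB/s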

Example 5 with HTableDescriptor

use of org.apache.hadoop.hbase.HTableDescriptor in project hbase by apache.

the class TestRSGroupBasedLoadBalancer method getTableName.

private TableName getTableName(ServerName sn) throws IOException {
    TableName tableName = null;
    RSGroupInfoManager gm = getMockedGroupInfoManager();
    RSGroupInfo groupOfServer = null;
    // Find the RSGroup that contains the given server.
    for (RSGroupInfo gInfo : gm.listRSGroups()) {
        if (gInfo.containsServer(sn.getAddress())) {
            groupOfServer = gInfo;
            break;
        }
    }
    // Pick a table whose RSGroup name matches that server's group.
    for (HTableDescriptor desc : tableDescs) {
        if (gm.getRSGroupOfTable(desc.getTableName()).endsWith(groupOfServer.getName())) {
            tableName = desc.getTableName();
        }
    }
    return tableName;
}
Also used : TableName(org.apache.hadoop.hbase.TableName) RSGroupInfo(org.apache.hadoop.hbase.rsgroup.RSGroupInfo) RSGroupInfoManager(org.apache.hadoop.hbase.rsgroup.RSGroupInfoManager) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor)

Aggregations

HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor): 867
HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor): 555
Test (org.junit.Test): 425
TableName (org.apache.hadoop.hbase.TableName): 258
HRegionInfo (org.apache.hadoop.hbase.HRegionInfo): 171
IOException (java.io.IOException): 167
Put (org.apache.hadoop.hbase.client.Put): 149
Table (org.apache.hadoop.hbase.client.Table): 134
Path (org.apache.hadoop.fs.Path): 127
Admin (org.apache.hadoop.hbase.client.Admin): 121
Configuration (org.apache.hadoop.conf.Configuration): 87
HBaseAdmin (org.apache.hadoop.hbase.client.HBaseAdmin): 77
ArrayList (java.util.ArrayList): 75
FileSystem (org.apache.hadoop.fs.FileSystem): 66
Result (org.apache.hadoop.hbase.client.Result): 62
Connection (org.apache.hadoop.hbase.client.Connection): 57
Scan (org.apache.hadoop.hbase.client.Scan): 51
Cell (org.apache.hadoop.hbase.Cell): 44
Delete (org.apache.hadoop.hbase.client.Delete): 44
HRegion (org.apache.hadoop.hbase.regionserver.HRegion): 43