Search in sources :

Example 51 with TableDescriptor

use of org.apache.hadoop.hbase.client.TableDescriptor in project hbase by apache.

The class IntegrationTestBulkLoad defines the method installSlowingCoproc.

/**
 * Attaches the {@link SlowMeCoproScanOperations} coprocessor to the table returned by
 * {@code getTablename()}. Does nothing when the replica count is left at its default,
 * since slowing scans is only needed for the region-replica variant of the test.
 */
private void installSlowingCoproc() throws IOException, InterruptedException {
    int replicas = conf.getInt(NUM_REPLICA_COUNT_KEY, NUM_REPLICA_COUNT_DEFAULT);
    if (replicas == NUM_REPLICA_COUNT_DEFAULT) {
        return;
    }
    Admin admin = util.getAdmin();
    TableName tableName = getTablename();
    // Rebuild the current descriptor with the slowing coprocessor added, then push it back.
    TableDescriptor current = admin.getDescriptor(tableName);
    TableDescriptor modified = TableDescriptorBuilder.newBuilder(current)
        .setCoprocessor(SlowMeCoproScanOperations.class.getName())
        .build();
    admin.modifyTable(modified);
}
Also used : TableName(org.apache.hadoop.hbase.TableName) TableDescriptorBuilder(org.apache.hadoop.hbase.client.TableDescriptorBuilder) Admin(org.apache.hadoop.hbase.client.Admin) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor)

Example 52 with TableDescriptor

use of org.apache.hadoop.hbase.client.TableDescriptor in project hbase by apache.

The class RemoveColumnAction defines the method perform.

/**
 * Chaos action: removes one randomly chosen, non-protected column family from
 * {@code tableName}. Bails out when removing a family would leave only the
 * protected set (or fewer than one family when nothing is protected), or when
 * the monkey is stopping.
 */
@Override
public void perform() throws Exception {
    TableDescriptor current = admin.getDescriptor(tableName);
    ColumnFamilyDescriptor[] families = current.getColumnFamilies();
    // Never delete the last unprotected family.
    int floor = (protectedColumns == null) ? 1 : protectedColumns.size();
    if (families.length <= floor) {
        return;
    }
    // Keep drawing until we land on a family that is not protected.
    int pick = random.nextInt(families.length);
    while (protectedColumns != null
        && protectedColumns.contains(families[pick].getNameAsString())) {
        pick = random.nextInt(families.length);
    }
    byte[] familyName = families[pick].getName();
    getLogger().debug("Performing action: Removing " + Bytes.toString(familyName) + " from "
        + tableName.getNameAsString());
    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(current);
    builder.removeColumnFamily(familyName);
    // Don't try the modify if we're stopping
    if (context.isStopping()) {
        return;
    }
    admin.modifyTable(builder.build());
}
Also used : TableDescriptorBuilder(org.apache.hadoop.hbase.client.TableDescriptorBuilder) ColumnFamilyDescriptor(org.apache.hadoop.hbase.client.ColumnFamilyDescriptor) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor)

Example 53 with TableDescriptor

use of org.apache.hadoop.hbase.client.TableDescriptor in project hbase by apache.

The class DecreaseMaxHFileSizeAction defines the method perform.

/**
 * Chaos action: shrinks the table's max HFile size by roughly 10%, clamped near
 * {@code minFileSize} with +/-512 bytes of jitter, then optionally sleeps.
 */
@Override
public void perform() throws Exception {
    TableDescriptor current = admin.getDescriptor(tableName);
    // Start from the table's own setting; fall back to the cluster-wide default
    // when the table doesn't specify one. (An unset value is fine — chaos is the goal.)
    long maxFileSize = current.getMaxFileSize();
    if (maxFileSize <= 0) {
        maxFileSize = context.getHBaseCluster().getConf()
            .getLong(HConstants.HREGION_MAX_FILESIZE, HConstants.DEFAULT_MAX_FILE_SIZE);
    }
    // Decrease by 10%, but don't drift far below the floor: stay within ~512 bytes of it.
    long shrunk = (long) (maxFileSize * 0.9);
    long target = Math.max(minFileSize, shrunk) - (512 - random.nextInt(1024));
    TableDescriptor modified =
        TableDescriptorBuilder.newBuilder(current).setMaxFileSize(target).build();
    // Don't try the modify if we're stopping
    if (context.isStopping()) {
        return;
    }
    admin.modifyTable(modified);
    // Give the cluster time to react before the next action.
    if (sleepTime > 0) {
        Thread.sleep(sleepTime);
    }
}
Also used : TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor)

Example 54 with TableDescriptor

use of org.apache.hadoop.hbase.client.TableDescriptor in project hbase by apache.

The class TestSecureRESTServer defines the method instertData.

private static void instertData() throws IOException, InterruptedException {
    // Create a table, write a row to it, grant read perms to the client
    UserGroupInformation superuser = UserGroupInformation.loginUserFromKeytabAndReturnUGI(SERVICE_PRINCIPAL, serviceKeytab.getAbsolutePath());
    final TableName table = TableName.valueOf("publicTable");
    superuser.doAs(new PrivilegedExceptionAction<Void>() {

        @Override
        public Void run() throws Exception {
            try (Connection conn = ConnectionFactory.createConnection(TEST_UTIL.getConfiguration())) {
                TableDescriptor desc = TableDescriptorBuilder.newBuilder(table).setColumnFamily(ColumnFamilyDescriptorBuilder.of("f1")).build();
                conn.getAdmin().createTable(desc);
                try (Table t = conn.getTable(table)) {
                    Put p = new Put(Bytes.toBytes("a"));
                    p.addColumn(Bytes.toBytes("f1"), new byte[0], Bytes.toBytes("1"));
                    t.put(p);
                }
                AccessControlClient.grant(conn, CLIENT_PRINCIPAL, Action.READ);
            } catch (Throwable e) {
                if (e instanceof Exception) {
                    throw (Exception) e;
                } else {
                    throw new Exception(e);
                }
            }
            return null;
        }
    });
}
Also used : TableName(org.apache.hadoop.hbase.TableName) Table(org.apache.hadoop.hbase.client.Table) HttpURLConnection(java.net.HttpURLConnection) Connection(org.apache.hadoop.hbase.client.Connection) IOException(java.io.IOException) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor) Put(org.apache.hadoop.hbase.client.Put) HttpPut(org.apache.http.client.methods.HttpPut) UserGroupInformation(org.apache.hadoop.security.UserGroupInformation)

Example 55 with TableDescriptor

use of org.apache.hadoop.hbase.client.TableDescriptor in project hbase by apache.

The class LocalHBaseCluster defines the method main.

/**
 * Smoke test: spins up a local cluster, creates a table named after the cluster
 * class, and shuts the cluster down again.
 */
public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    LocalHBaseCluster cluster = new LocalHBaseCluster(conf);
    cluster.startup();
    try (Connection connection = ConnectionFactory.createConnection(conf);
        Admin admin = connection.getAdmin()) {
        // Creating a table is enough to prove the cluster is serving requests.
        TableName tableName = TableName.valueOf(cluster.getClass().getName());
        admin.createTable(TableDescriptorBuilder.newBuilder(tableName).build());
    } finally {
        // Always tear the cluster down, even if table creation failed.
        cluster.shutdown();
    }
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) Connection(org.apache.hadoop.hbase.client.Connection) Admin(org.apache.hadoop.hbase.client.Admin) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor)

Aggregations

TableDescriptor (org.apache.hadoop.hbase.client.TableDescriptor)639 Test (org.junit.Test)356 TableName (org.apache.hadoop.hbase.TableName)237 RegionInfo (org.apache.hadoop.hbase.client.RegionInfo)180 IOException (java.io.IOException)151 Put (org.apache.hadoop.hbase.client.Put)142 Admin (org.apache.hadoop.hbase.client.Admin)136 Path (org.apache.hadoop.fs.Path)124 Table (org.apache.hadoop.hbase.client.Table)121 ColumnFamilyDescriptor (org.apache.hadoop.hbase.client.ColumnFamilyDescriptor)96 Configuration (org.apache.hadoop.conf.Configuration)91 TableDescriptorBuilder (org.apache.hadoop.hbase.client.TableDescriptorBuilder)77 ArrayList (java.util.ArrayList)75 FileSystem (org.apache.hadoop.fs.FileSystem)66 Result (org.apache.hadoop.hbase.client.Result)66 HRegion (org.apache.hadoop.hbase.regionserver.HRegion)64 Connection (org.apache.hadoop.hbase.client.Connection)59 Scan (org.apache.hadoop.hbase.client.Scan)50 Get (org.apache.hadoop.hbase.client.Get)49 List (java.util.List)39