Use of org.apache.hadoop.hbase.client.TableDescriptor in project hbase by apache.
The class IntegrationTestBulkLoad, method installSlowingCoproc.
/**
 * Modify table {@code getTableName()} to carry {@link SlowMeCoproScanOperations}.
 */
private void installSlowingCoproc() throws IOException, InterruptedException {
  int replicaCount = conf.getInt(NUM_REPLICA_COUNT_KEY, NUM_REPLICA_COUNT_DEFAULT);
  if (replicaCount == NUM_REPLICA_COUNT_DEFAULT) {
    return;
  }
  TableName t = getTablename();
  Admin admin = util.getAdmin();
  TableDescriptor desc = admin.getDescriptor(t);
  TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(desc);
  builder.setCoprocessor(SlowMeCoproScanOperations.class.getName());
  admin.modifyTable(builder.build());
}
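The same builder pattern also works when attaching a coprocessor at table-creation time instead of altering an existing table. The sketch below is not from the HBase source; the table name, column family, and coprocessor class name are placeholders, and the coprocessor class is assumed to already be on the region servers' classpath.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;

public class CreateTableWithCoprocExample {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    try (Connection conn = ConnectionFactory.createConnection(conf);
        Admin admin = conn.getAdmin()) {
      // Build the descriptor with one column family and a coprocessor class name.
      // "org.example.MyRegionObserver" is a placeholder; it must be loadable on the
      // region servers or table creation may be rejected by the master's sanity checks.
      TableDescriptor desc = TableDescriptorBuilder
          .newBuilder(TableName.valueOf("example_table"))
          .setColumnFamily(ColumnFamilyDescriptorBuilder.of("f1"))
          .setCoprocessor("org.example.MyRegionObserver")
          .build();
      admin.createTable(desc);
    }
  }
}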
Use of org.apache.hadoop.hbase.client.TableDescriptor in project hbase by apache.
The class RemoveColumnAction, method perform.
@Override
public void perform() throws Exception {
  TableDescriptor tableDescriptor = admin.getDescriptor(tableName);
  ColumnFamilyDescriptor[] columnDescriptors = tableDescriptor.getColumnFamilies();
  if (columnDescriptors.length <= (protectedColumns == null ? 1 : protectedColumns.size())) {
    return;
  }
  int index = random.nextInt(columnDescriptors.length);
  while (protectedColumns != null
      && protectedColumns.contains(columnDescriptors[index].getNameAsString())) {
    index = random.nextInt(columnDescriptors.length);
  }
  byte[] colDescName = columnDescriptors[index].getName();
  getLogger().debug("Performing action: Removing " + Bytes.toString(colDescName) + " from "
      + tableName.getNameAsString());
  TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableDescriptor);
  builder.removeColumnFamily(colDescName);
  // Don't try the modify if we're stopping
  if (context.isStopping()) {
    return;
  }
  admin.modifyTable(builder.build());
}
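The inverse operation can be sketched the same way: rebuild the descriptor with an extra column family and push it back with Admin.modifyTable. The example below is a hedged sketch, not part of RemoveColumnAction; the table and family names are placeholders and a running cluster is assumed.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;

public class AddColumnFamilyExample {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    TableName tableName = TableName.valueOf("example_table");
    try (Connection conn = ConnectionFactory.createConnection(conf);
        Admin admin = conn.getAdmin()) {
      // Rebuild the descriptor with an extra family, then push it back.
      TableDescriptor current = admin.getDescriptor(tableName);
      TableDescriptor modified = TableDescriptorBuilder.newBuilder(current)
          .setColumnFamily(ColumnFamilyDescriptorBuilder.of("new_family"))
          .build();
      admin.modifyTable(modified);
      // Alternatively, Admin.addColumnFamily performs the same change in one call:
      // admin.addColumnFamily(tableName, ColumnFamilyDescriptorBuilder.of("new_family"));
    }
  }
}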
Use of org.apache.hadoop.hbase.client.TableDescriptor in project hbase by apache.
The class DecreaseMaxHFileSizeAction, method perform.
@Override
public void perform() throws Exception {
  TableDescriptor td = admin.getDescriptor(tableName);
  // Try and get the current value.
  long currentValue = td.getMaxFileSize();
  // If the table does not override the max file size, fall back to the cluster default.
  // That's ok. We're trying to cause chaos.
  if (currentValue <= 0) {
    currentValue = context.getHBaseCluster().getConf()
        .getLong(HConstants.HREGION_MAX_FILESIZE, HConstants.DEFAULT_MAX_FILE_SIZE);
  }
  // Decrease by 10% at a time.
  long newValue = (long) (currentValue * 0.9);
  // We don't want to go too far below 1gb.
  // So go to about 1gb +/- 512 on each side.
  newValue = Math.max(minFileSize, newValue) - (512 - random.nextInt(1024));
  // Change the table descriptor.
  TableDescriptor modifiedTable =
      TableDescriptorBuilder.newBuilder(td).setMaxFileSize(newValue).build();
  // Don't try the modify if we're stopping
  if (context.isStopping()) {
    return;
  }
  // Modify the table.
  admin.modifyTable(modifiedTable);
  // Sleep some time.
  if (sleepTime > 0) {
    Thread.sleep(sleepTime);
  }
}
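The fallback logic above (use the cluster-wide default when the descriptor does not set a maximum file size) can be exercised on its own. The following is a minimal sketch, assuming a reachable cluster; the table name is a placeholder.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.TableDescriptor;

public class ReadMaxFileSizeExample {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    try (Connection conn = ConnectionFactory.createConnection(conf);
        Admin admin = conn.getAdmin()) {
      TableDescriptor td = admin.getDescriptor(TableName.valueOf("example_table"));
      long maxFileSize = td.getMaxFileSize();
      if (maxFileSize <= 0) {
        // No table-level override; use the cluster default.
        maxFileSize = conf.getLong(HConstants.HREGION_MAX_FILESIZE,
            HConstants.DEFAULT_MAX_FILE_SIZE);
      }
      System.out.println("Effective max HFile size: " + maxFileSize + " bytes");
    }
  }
}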
Use of org.apache.hadoop.hbase.client.TableDescriptor in project hbase by apache.
The class TestSecureRESTServer, method instertData.
private static void instertData() throws IOException, InterruptedException {
  // Create a table, write a row to it, grant read perms to the client
  UserGroupInformation superuser = UserGroupInformation.loginUserFromKeytabAndReturnUGI(
      SERVICE_PRINCIPAL, serviceKeytab.getAbsolutePath());
  final TableName table = TableName.valueOf("publicTable");
  superuser.doAs(new PrivilegedExceptionAction<Void>() {
    @Override
    public Void run() throws Exception {
      try (Connection conn = ConnectionFactory.createConnection(TEST_UTIL.getConfiguration())) {
        TableDescriptor desc = TableDescriptorBuilder.newBuilder(table)
            .setColumnFamily(ColumnFamilyDescriptorBuilder.of("f1")).build();
        conn.getAdmin().createTable(desc);
        try (Table t = conn.getTable(table)) {
          Put p = new Put(Bytes.toBytes("a"));
          p.addColumn(Bytes.toBytes("f1"), new byte[0], Bytes.toBytes("1"));
          t.put(p);
        }
        AccessControlClient.grant(conn, CLIENT_PRINCIPAL, Action.READ);
      } catch (Throwable e) {
        if (e instanceof Exception) {
          throw (Exception) e;
        } else {
          throw new Exception(e);
        }
      }
      return null;
    }
  });
}
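A natural follow-up is reading the row back with the plain Java client. The sketch below is not part of TestSecureRESTServer; it reuses the table, family, and row names from the snippet and assumes the caller already has read access and a configured connection.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class ReadPublicTableExample {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    try (Connection conn = ConnectionFactory.createConnection(conf);
        Table t = conn.getTable(TableName.valueOf("publicTable"))) {
      Get get = new Get(Bytes.toBytes("a"));
      Result result = t.get(get);
      // The row was written with family "f1" and an empty qualifier.
      byte[] value = result.getValue(Bytes.toBytes("f1"), new byte[0]);
      System.out.println("Value: " + Bytes.toString(value));
    }
  }
}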
Use of org.apache.hadoop.hbase.client.TableDescriptor in project hbase by apache.
The class LocalHBaseCluster, method main.
/**
 * Test things basically work.
 */
public static void main(String[] args) throws IOException {
  Configuration conf = HBaseConfiguration.create();
  LocalHBaseCluster cluster = new LocalHBaseCluster(conf);
  cluster.startup();
  try (Connection connection = ConnectionFactory.createConnection(conf);
      Admin admin = connection.getAdmin()) {
    TableDescriptor htd =
        TableDescriptorBuilder.newBuilder(TableName.valueOf(cluster.getClass().getName())).build();
    admin.createTable(htd);
  } finally {
    cluster.shutdown();
  }
}
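Once a table has been created this way, the same Admin handle can be used to inspect its descriptor and clean up afterwards. The sketch below is not from LocalHBaseCluster; the table name is a placeholder and a reachable cluster is assumed.

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.TableDescriptor;

public class ListAndDropTableExample {
  public static void main(String[] args) throws IOException {
    Configuration conf = HBaseConfiguration.create();
    TableName tableName = TableName.valueOf("example_table");
    try (Connection conn = ConnectionFactory.createConnection(conf);
        Admin admin = conn.getAdmin()) {
      // Print the table name from every user-table descriptor the cluster knows about.
      for (TableDescriptor td : admin.listTableDescriptors()) {
        System.out.println(td.getTableName());
      }
      // Tables must be disabled before they can be deleted.
      if (admin.tableExists(tableName)) {
        admin.disableTable(tableName);
        admin.deleteTable(tableName);
      }
    }
  }
}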