Use of org.apache.hadoop.hbase.client.TableDescriptorBuilder in project hbase by apache.
The class IntegrationTestBulkLoad, method installSlowingCoproc.
/**
 * Modify table {@code getTablename()} to carry {@link SlowMeCoproScanOperations}.
 */
private void installSlowingCoproc() throws IOException, InterruptedException {
  int replicaCount = conf.getInt(NUM_REPLICA_COUNT_KEY, NUM_REPLICA_COUNT_DEFAULT);
  if (replicaCount == NUM_REPLICA_COUNT_DEFAULT) {
    return;
  }
  TableName t = getTablename();
  Admin admin = util.getAdmin();
  TableDescriptor desc = admin.getDescriptor(t);
  TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(desc);
  builder.setCoprocessor(SlowMeCoproScanOperations.class.getName());
  admin.modifyTable(builder.build());
}
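The fetch-copy-modify pattern above generalizes to attaching any coprocessor to a live table. A minimal standalone sketch, assuming a hypothetical observer class name that is not from the HBase source:

import java.io.IOException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;

public class AttachCoprocessorExample {
  // Attach an observer coprocessor to an existing table.
  // "com.example.MyObserver" is a hypothetical class name; substitute a
  // coprocessor that is actually on the region servers' classpath.
  static void attach(Admin admin, TableName table) throws IOException {
    TableDescriptor current = admin.getDescriptor(table);
    TableDescriptor updated = TableDescriptorBuilder.newBuilder(current)
      .setCoprocessor("com.example.MyObserver")
      .build();
    // modifyTable reopens the table's regions so they pick up the new descriptor.
    admin.modifyTable(updated);
  }
}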
Use of org.apache.hadoop.hbase.client.TableDescriptorBuilder in project hbase by apache.
The class IntegrationTestMTTR, method setupTables.
private static void setupTables() throws IOException {
  // Get the table name.
  tableName = TableName.valueOf(
    util.getConfiguration().get("hbase.IntegrationTestMTTR.tableName", "IntegrationTestMTTR"));
  loadTableName = TableName.valueOf(
    util.getConfiguration().get("hbase.IntegrationTestMTTR.loadTableName", "IntegrationTestMTTRLoadTestTool"));
  if (util.getAdmin().tableExists(tableName)) {
    util.deleteTable(tableName);
  }
  if (util.getAdmin().tableExists(loadTableName)) {
    util.deleteTable(loadTableName);
  }
  // Create the table. If this fails then fail everything.
  TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);
  // Make the max file size huge so that splits don't happen during the test.
  builder.setMaxFileSize(Long.MAX_VALUE);
  ColumnFamilyDescriptorBuilder colDescriptorBldr = ColumnFamilyDescriptorBuilder.newBuilder(FAMILY);
  colDescriptorBldr.setMaxVersions(1);
  builder.setColumnFamily(colDescriptorBldr.build());
  util.getAdmin().createTable(builder.build());
  // Setup the table for LoadTestTool.
  int ret = loadTool.run(new String[] { "-tn", loadTableName.getNameAsString(), "-init_only" });
  assertEquals("Failed to initialize LoadTestTool", 0, ret);
}
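Every setter on TableDescriptorBuilder and ColumnFamilyDescriptorBuilder returns the builder, so the descriptor construction above can also be written as a single fluent chain. A minimal equivalent sketch, reusing the tableName, FAMILY, and util fields from the snippet:

// Fluent equivalent of the builder calls in setupTables().
TableDescriptor desc = TableDescriptorBuilder.newBuilder(tableName)
  .setMaxFileSize(Long.MAX_VALUE) // keep regions from splitting mid-test
  .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILY)
    .setMaxVersions(1) // one version per cell is enough for this test
    .build())
  .build();
util.getAdmin().createTable(desc);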
Use of org.apache.hadoop.hbase.client.TableDescriptorBuilder in project hbase by apache.
The class RemoveColumnAction, method perform.
@Override
public void perform() throws Exception {
  TableDescriptor tableDescriptor = admin.getDescriptor(tableName);
  ColumnFamilyDescriptor[] columnDescriptors = tableDescriptor.getColumnFamilies();
  // Keep at least one column family, plus any protected ones.
  if (columnDescriptors.length <= (protectedColumns == null ? 1 : protectedColumns.size())) {
    return;
  }
  // Pick a random column family, re-rolling until we land on an unprotected one.
  int index = random.nextInt(columnDescriptors.length);
  while (protectedColumns != null
      && protectedColumns.contains(columnDescriptors[index].getNameAsString())) {
    index = random.nextInt(columnDescriptors.length);
  }
  byte[] colDescName = columnDescriptors[index].getName();
  getLogger().debug("Performing action: Removing " + Bytes.toString(colDescName) + " from "
    + tableName.getNameAsString());
  TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableDescriptor);
  builder.removeColumnFamily(colDescName);
  // Don't try the modify if we're stopping.
  if (context.isStopping()) {
    return;
  }
  admin.modifyTable(builder.build());
}
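The re-roll loop always terminates because the guard at the top guarantees at least one unprotected family exists, but the pick can also be made without looping by filtering first. A hedged alternative sketch using java.util.stream (imports: java.util.Arrays, java.util.List, java.util.stream.Collectors); this is not the ChaosMonkey implementation:

// Filter out protected families up front, then pick uniformly from the rest.
// Assumes the earlier guard already verified a removable family exists.
List<ColumnFamilyDescriptor> removable = Arrays.stream(columnDescriptors)
  .filter(cfd -> protectedColumns == null
    || !protectedColumns.contains(cfd.getNameAsString()))
  .collect(Collectors.toList());
byte[] colDescName = removable.get(random.nextInt(removable.size())).getName();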
Use of org.apache.hadoop.hbase.client.TableDescriptorBuilder in project hbase by apache.
The class TestTableScan, method setUpBeforeClass.
@BeforeClass
public static void setUpBeforeClass() throws Exception {
  conf = TEST_UTIL.getConfiguration();
  conf.set(Constants.CUSTOM_FILTERS, "CustomFilter:" + CustomFilter.class.getName());
  TEST_UTIL.startMiniCluster();
  REST_TEST_UTIL.startServletContainer(conf);
  client = new Client(new Cluster().add("localhost", REST_TEST_UTIL.getServletPort()));
  Admin admin = TEST_UTIL.getAdmin();
  if (!admin.tableExists(TABLE)) {
    TableDescriptorBuilder tableDescriptorBuilder = TableDescriptorBuilder.newBuilder(TABLE);
    ColumnFamilyDescriptor columnFamilyDescriptor =
      ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(CFA)).build();
    tableDescriptorBuilder.setColumnFamily(columnFamilyDescriptor);
    columnFamilyDescriptor = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(CFB)).build();
    tableDescriptorBuilder.setColumnFamily(columnFamilyDescriptor);
    admin.createTable(tableDescriptorBuilder.build());
    expectedRows1 = TestScannerResource.insertData(conf, TABLE, COLUMN_1, 1.0);
    expectedRows2 = TestScannerResource.insertData(conf, TABLE, COLUMN_2, 0.5);
    expectedRows3 = TestScannerResource.insertData(conf, TABLE, COLUMN_EMPTY, 1.0);
  }
}
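ColumnFamilyDescriptorBuilder.of(String) builds a family descriptor with default settings in one call, so the two-family table above can be created more compactly. An equivalent sketch, reusing TABLE, CFA, and CFB from the test:

// Compact equivalent: of(...) returns a family descriptor with default settings,
// so no intermediate builder variables are needed.
admin.createTable(TableDescriptorBuilder.newBuilder(TABLE)
  .setColumnFamily(ColumnFamilyDescriptorBuilder.of(CFA))
  .setColumnFamily(ColumnFamilyDescriptorBuilder.of(CFB))
  .build());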
Use of org.apache.hadoop.hbase.client.TableDescriptorBuilder in project hbase by apache.
The class ImportTsv, method createTable.
private static void createTable(Admin admin, TableName tableName, String[] columns)
    throws IOException {
  TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableName);
  Set<String> cfSet = getColumnFamilies(columns);
  for (String cf : cfSet) {
    builder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(cf));
  }
  LOG.warn(format("Creating table '%s' with '%s' columns and default descriptors.", tableName, cfSet));
  admin.createTable(builder.build());
}
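getColumnFamilies(columns) is defined elsewhere in ImportTsv. A simplified sketch of what it has to do, assuming "family:qualifier" column specs; the real method also skips reserved specs such as HBASE_ROW_KEY that carry no family component:

import java.util.HashSet;
import java.util.Set;

// Simplified sketch, not the actual ImportTsv implementation: collect the
// distinct family names from "family:qualifier" column specs.
private static Set<String> getColumnFamilies(String[] columns) {
  Set<String> cfSet = new HashSet<>();
  for (String column : columns) {
    if (!column.contains(":")) {
      continue; // reserved spec such as HBASE_ROW_KEY; no family to extract
    }
    cfSet.add(column.split(":", 2)[0]);
  }
  return cfSet;
}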