Search in sources :

Example 11 with TableDescriptorBuilder

use of org.apache.hadoop.hbase.client.TableDescriptorBuilder in project hbase by apache.

From the class IntegrationTestBulkLoad, the method installSlowingCoproc:

/**
 * Adds the {@link SlowMeCoproScanOperations} coprocessor to the table returned by
 * {@code getTablename()}. A no-op when region replication is left at its default,
 * since the slowdown is only needed for the replica-count test variant.
 */
private void installSlowingCoproc() throws IOException, InterruptedException {
    int replicas = conf.getInt(NUM_REPLICA_COUNT_KEY, NUM_REPLICA_COUNT_DEFAULT);
    if (replicas == NUM_REPLICA_COUNT_DEFAULT) {
        return;
    }
    TableName tableName = getTablename();
    Admin admin = util.getAdmin();
    TableDescriptor current = admin.getDescriptor(tableName);
    TableDescriptorBuilder updated = TableDescriptorBuilder.newBuilder(current)
        .setCoprocessor(SlowMeCoproScanOperations.class.getName());
    admin.modifyTable(updated.build());
}
Also used : TableName(org.apache.hadoop.hbase.TableName) TableDescriptorBuilder(org.apache.hadoop.hbase.client.TableDescriptorBuilder) Admin(org.apache.hadoop.hbase.client.Admin) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor)

Example 12 with TableDescriptorBuilder

use of org.apache.hadoop.hbase.client.TableDescriptorBuilder in project hbase by apache.

From the class IntegrationTestMTTR, the method setupTables:

/**
 * Prepares the two tables this test needs: the main MTTR table (created with an
 * effectively unlimited max file size so splits never fire mid-test, and a
 * single-version column family) and the LoadTestTool table, which is initialized
 * via the tool itself. Any pre-existing copies are dropped first.
 */
private static void setupTables() throws IOException {
    // Resolve table names, honoring configuration overrides.
    tableName = TableName.valueOf(
        util.getConfiguration().get("hbase.IntegrationTestMTTR.tableName", "IntegrationTestMTTR"));
    loadTableName = TableName.valueOf(
        util.getConfiguration().get("hbase.IntegrationTestMTTR.loadTableName",
            "IntegrationTestMTTRLoadTestTool"));
    // Clean slate: remove leftovers from any earlier run.
    if (util.getAdmin().tableExists(tableName)) {
        util.deleteTable(tableName);
    }
    if (util.getAdmin().tableExists(loadTableName)) {
        util.deleteTable(loadTableName);
    }
    // Create the main table; a failure here should fail the whole test.
    TableDescriptorBuilder tableBuilder = TableDescriptorBuilder.newBuilder(tableName);
    // Huge max file size keeps regions from splitting during the test.
    tableBuilder.setMaxFileSize(Long.MAX_VALUE);
    ColumnFamilyDescriptorBuilder familyBuilder = ColumnFamilyDescriptorBuilder.newBuilder(FAMILY);
    familyBuilder.setMaxVersions(1);
    tableBuilder.setColumnFamily(familyBuilder.build());
    util.getAdmin().createTable(tableBuilder.build());
    // Let LoadTestTool create its own table via init-only mode.
    int exitCode = loadTool.run(new String[] { "-tn", loadTableName.getNameAsString(), "-init_only" });
    assertEquals("Failed to initialize LoadTestTool", 0, exitCode);
}
Also used : ColumnFamilyDescriptorBuilder(org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder) TableDescriptorBuilder(org.apache.hadoop.hbase.client.TableDescriptorBuilder)

Example 13 with TableDescriptorBuilder

use of org.apache.hadoop.hbase.client.TableDescriptorBuilder in project hbase by apache.

From the class RemoveColumnAction, the method perform:

/**
 * Chaos action: drops one randomly chosen, non-protected column family from the
 * target table. Bails out when removing a family would leave fewer families than
 * the protected set requires (or fewer than one when nothing is protected), and
 * skips the modify call entirely if the monkey is shutting down.
 */
@Override
public void perform() throws Exception {
    TableDescriptor descriptor = admin.getDescriptor(tableName);
    ColumnFamilyDescriptor[] families = descriptor.getColumnFamilies();
    int minimumToKeep = (protectedColumns == null) ? 1 : protectedColumns.size();
    if (families.length <= minimumToKeep) {
        return;
    }
    // Rejection-sample until we land on a family that is not protected; the guard
    // above guarantees at least one such family exists, so this terminates.
    int victim;
    do {
        victim = random.nextInt(families.length);
    } while (protectedColumns != null
        && protectedColumns.contains(families[victim].getNameAsString()));
    byte[] colDescName = families[victim].getName();
    getLogger().debug("Performing action: Removing " + Bytes.toString(colDescName) + " from " + tableName.getNameAsString());
    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(descriptor)
        .removeColumnFamily(colDescName);
    // Don't try the modify if we're stopping
    if (context.isStopping()) {
        return;
    }
    admin.modifyTable(builder.build());
}
Also used : TableDescriptorBuilder(org.apache.hadoop.hbase.client.TableDescriptorBuilder) ColumnFamilyDescriptor(org.apache.hadoop.hbase.client.ColumnFamilyDescriptor) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor)

Example 14 with TableDescriptorBuilder

use of org.apache.hadoop.hbase.client.TableDescriptorBuilder in project hbase by apache.

From the class TestTableScan, the method setUpBeforeClass:

/**
 * One-time suite setup: registers the custom scan filter, boots the mini cluster
 * and the REST servlet container, creates a REST client against the local servlet
 * port, and (if absent) creates the test table with two column families before
 * seeding it with the three expected data sets.
 */
@BeforeClass
public static void setUpBeforeClass() throws Exception {
    conf = TEST_UTIL.getConfiguration();
    // Make CustomFilter resolvable by name through the REST scanner interface.
    conf.set(Constants.CUSTOM_FILTERS, "CustomFilter:" + CustomFilter.class.getName());
    TEST_UTIL.startMiniCluster();
    REST_TEST_UTIL.startServletContainer(conf);
    client = new Client(new Cluster().add("localhost", REST_TEST_UTIL.getServletPort()));
    Admin admin = TEST_UTIL.getAdmin();
    if (admin.tableExists(TABLE)) {
        return;
    }
    TableDescriptorBuilder tableBuilder = TableDescriptorBuilder.newBuilder(TABLE);
    tableBuilder.setColumnFamily(
        ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(CFA)).build());
    tableBuilder.setColumnFamily(
        ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(CFB)).build());
    admin.createTable(tableBuilder.build());
    // Populate rows the scan tests assert against.
    expectedRows1 = TestScannerResource.insertData(conf, TABLE, COLUMN_1, 1.0);
    expectedRows2 = TestScannerResource.insertData(conf, TABLE, COLUMN_2, 0.5);
    expectedRows3 = TestScannerResource.insertData(conf, TABLE, COLUMN_EMPTY, 1.0);
}
Also used : Cluster(org.apache.hadoop.hbase.rest.client.Cluster) TableDescriptorBuilder(org.apache.hadoop.hbase.client.TableDescriptorBuilder) Client(org.apache.hadoop.hbase.rest.client.Client) Admin(org.apache.hadoop.hbase.client.Admin) ColumnFamilyDescriptor(org.apache.hadoop.hbase.client.ColumnFamilyDescriptor) BeforeClass(org.junit.BeforeClass)

Example 15 with TableDescriptorBuilder

use of org.apache.hadoop.hbase.client.TableDescriptorBuilder in project hbase by apache.

From the class ImportTsv, the method createTable:

/**
 * Creates {@code tableName} with one default-descriptor column family per family
 * referenced in {@code columns}, logging the action at WARN so it stands out in
 * job output.
 */
private static void createTable(Admin admin, TableName tableName, String[] columns) throws IOException {
    Set<String> cfSet = getColumnFamilies(columns);
    TableDescriptorBuilder tableBuilder = TableDescriptorBuilder.newBuilder(tableName);
    for (String family : cfSet) {
        // Default descriptors: no custom versions, TTL, compression, etc.
        tableBuilder.setColumnFamily(ColumnFamilyDescriptorBuilder.of(family));
    }
    LOG.warn(format("Creating table '%s' with '%s' columns and default descriptors.", tableName, cfSet));
    admin.createTable(tableBuilder.build());
}
Also used : TableDescriptorBuilder(org.apache.hadoop.hbase.client.TableDescriptorBuilder)

Aggregations

TableDescriptorBuilder (org.apache.hadoop.hbase.client.TableDescriptorBuilder)190 ColumnFamilyDescriptor (org.apache.hadoop.hbase.client.ColumnFamilyDescriptor)76 Test (org.junit.Test)68 TableDescriptor (org.apache.hadoop.hbase.client.TableDescriptor)61 ColumnFamilyDescriptorBuilder (org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder)47 RegionInfo (org.apache.hadoop.hbase.client.RegionInfo)39 TableName (org.apache.hadoop.hbase.TableName)34 Path (org.apache.hadoop.fs.Path)31 Admin (org.apache.hadoop.hbase.client.Admin)29 Put (org.apache.hadoop.hbase.client.Put)25 IOException (java.io.IOException)24 Configuration (org.apache.hadoop.conf.Configuration)20 Table (org.apache.hadoop.hbase.client.Table)18 ArrayList (java.util.ArrayList)17 FileSystem (org.apache.hadoop.fs.FileSystem)15 HRegion (org.apache.hadoop.hbase.regionserver.HRegion)15 Before (org.junit.Before)12 Cell (org.apache.hadoop.hbase.Cell)11 NamespaceDescriptor (org.apache.hadoop.hbase.NamespaceDescriptor)10 HashMap (java.util.HashMap)9