Search in sources :

Example 61 with HTableDescriptor

use of org.apache.hadoop.hbase.HTableDescriptor in project hbase by apache.

the class TestAsyncTableAdminApi method testAddColumnFamily.

@Test
public void testAddColumnFamily() throws IOException {
    final TableName tableName = TableName.valueOf(name.getMethodName());
    // Create a table with a single family.
    HTableDescriptor baseHtd = new HTableDescriptor(tableName);
    baseHtd.addFamily(new HColumnDescriptor(FAMILY_0));
    admin.createTable(baseHtd).join();
    admin.disableTable(tableName).join();
    try {
        // Verify the table descriptor before the modification.
        verifyTableDescriptor(tableName, FAMILY_0);
        // Add a second family and verify the descriptor reflects it.
        admin.addColumnFamily(tableName, new HColumnDescriptor(FAMILY_1)).join();
        verifyTableDescriptor(tableName, FAMILY_0, FAMILY_1);
    } finally {
        // join() so the async delete completes before the method returns,
        // matching the cleanup style of the other tests in this class.
        admin.deleteTable(tableName).join();
    }
}
Also used : TableName(org.apache.hadoop.hbase.TableName) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) Test(org.junit.Test)

Example 62 with HTableDescriptor

use of org.apache.hadoop.hbase.HTableDescriptor in project hbase by apache.

the class TestAsyncTableAdminApi method testDisableCatalogTable.

@Test(timeout = 300000)
public void testDisableCatalogTable() throws Exception {
    try {
        // Disabling the catalog table must be rejected by the master.
        this.admin.disableTable(TableName.META_TABLE_NAME).join();
        fail("Expected to throw ConstraintException");
    } catch (Exception e) {
        // Expected.
    }
    // Before the fix for HBASE-6146, the below table creation was failing as the hbase:meta table
    // actually getting disabled by the disableTable() call.
    // Use Bytes.toBytes (UTF-8) rather than String.getBytes(), which depends on
    // the platform default charset.
    HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(Bytes.toBytes(name.getMethodName())));
    HColumnDescriptor hcd = new HColumnDescriptor(Bytes.toBytes("cf1"));
    htd.addFamily(hcd);
    admin.createTable(htd).join();
}
Also used : HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) TableNotEnabledException(org.apache.hadoop.hbase.TableNotEnabledException) IOException(java.io.IOException) CompletionException(java.util.concurrent.CompletionException) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) Test(org.junit.Test)

Example 63 with HTableDescriptor

use of org.apache.hadoop.hbase.HTableDescriptor in project hbase by apache.

the class TestAsyncTableAdminApi method testCreateTableWithEmptyRowInTheSplitKeys.

@Test(timeout = 300000)
public void testCreateTableWithEmptyRowInTheSplitKeys() throws IOException {
    byte[] tableName = Bytes.toBytes(name.getMethodName());
    // The middle split key is the empty row, which is invalid as an explicit
    // split point (the empty byte array already bounds the key space).
    byte[][] splitKeys = new byte[3][];
    // Bytes.toBytes (UTF-8) instead of String.getBytes(), which would use the
    // platform default charset; also consistent with tableName above.
    splitKeys[0] = Bytes.toBytes("region1");
    splitKeys[1] = HConstants.EMPTY_BYTE_ARRAY;
    splitKeys[2] = Bytes.toBytes("region2");
    HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(tableName));
    desc.addFamily(new HColumnDescriptor("col"));
    try {
        admin.createTable(desc, splitKeys).join();
        fail("Test case should fail as empty split key is passed.");
    } catch (CompletionException e) {
        // The async admin surfaces the server-side rejection wrapped in a
        // CompletionException; the cause carries the real failure.
        assertTrue(e.getCause() instanceof IllegalArgumentException);
    }
}
Also used : HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) CompletionException(java.util.concurrent.CompletionException) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) Test(org.junit.Test)

Example 64 with HTableDescriptor

use of org.apache.hadoop.hbase.HTableDescriptor in project hbase by apache.

the class TestAsyncTableAdminApi method testDeleteSameColumnFamilyTwice.

@Test
public void testDeleteSameColumnFamilyTwice() throws IOException {
    final TableName tableName = TableName.valueOf(name.getMethodName());
    // Set up a table containing both families.
    HTableDescriptor descriptor = new HTableDescriptor(tableName);
    descriptor.addFamily(new HColumnDescriptor(FAMILY_0));
    descriptor.addFamily(new HColumnDescriptor(FAMILY_1));
    admin.createTable(descriptor).join();
    admin.disableTable(tableName).join();
    try {
        // Confirm both families are present before modifying anything.
        verifyTableDescriptor(tableName, FAMILY_0, FAMILY_1);
        // The first deletion of FAMILY_1 should succeed, leaving only FAMILY_0.
        admin.deleteColumnFamily(tableName, FAMILY_1).join();
        verifyTableDescriptor(tableName, FAMILY_0);
        try {
            // The second deletion targets a family that no longer exists.
            admin.deleteColumnFamily(tableName, FAMILY_1).join();
            Assert.fail("Delete a non-exist column family should fail");
        } catch (Exception e) {
        // Expected.
        }
    } finally {
        admin.deleteTable(tableName).join();
    }
}
Also used : TableName(org.apache.hadoop.hbase.TableName) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) TableNotEnabledException(org.apache.hadoop.hbase.TableNotEnabledException) IOException(java.io.IOException) CompletionException(java.util.concurrent.CompletionException) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) Test(org.junit.Test)

Example 65 with HTableDescriptor

use of org.apache.hadoop.hbase.HTableDescriptor in project hbase by apache.

the class TestAsyncTableAdminApi method testAddSameColumnFamilyTwice.

@Test
public void testAddSameColumnFamilyTwice() throws Exception {
    final TableName tableName = TableName.valueOf(name.getMethodName());
    // Create a table with a single family.
    HTableDescriptor baseHtd = new HTableDescriptor(tableName);
    baseHtd.addFamily(new HColumnDescriptor(FAMILY_0));
    admin.createTable(baseHtd).join();
    admin.disableTable(tableName).join();
    try {
        // Verify the table descriptor before the modification.
        verifyTableDescriptor(tableName, FAMILY_0);
        // Add a second family and verify the descriptor reflects it.
        this.admin.addColumnFamily(tableName, new HColumnDescriptor(FAMILY_1)).join();
        verifyTableDescriptor(tableName, FAMILY_0, FAMILY_1);
        try {
            // Adding the same column family again must be rejected.
            this.admin.addColumnFamily(tableName, new HColumnDescriptor(FAMILY_1)).join();
            // Message fixed: the original said "Delete a non-exist column
            // family", a copy-paste from the delete test; this is an add.
            Assert.fail("Adding a column family that already exists should fail");
        } catch (Exception e) {
        // Expected.
        }
    } finally {
        admin.deleteTable(tableName).join();
    }
}
Also used : TableName(org.apache.hadoop.hbase.TableName) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) TableNotEnabledException(org.apache.hadoop.hbase.TableNotEnabledException) IOException(java.io.IOException) CompletionException(java.util.concurrent.CompletionException) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) Test(org.junit.Test)

Aggregations

HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor)867 HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor)555 Test (org.junit.Test)425 TableName (org.apache.hadoop.hbase.TableName)258 HRegionInfo (org.apache.hadoop.hbase.HRegionInfo)171 IOException (java.io.IOException)167 Put (org.apache.hadoop.hbase.client.Put)149 Table (org.apache.hadoop.hbase.client.Table)134 Path (org.apache.hadoop.fs.Path)127 Admin (org.apache.hadoop.hbase.client.Admin)121 Configuration (org.apache.hadoop.conf.Configuration)87 HBaseAdmin (org.apache.hadoop.hbase.client.HBaseAdmin)77 ArrayList (java.util.ArrayList)75 FileSystem (org.apache.hadoop.fs.FileSystem)66 Result (org.apache.hadoop.hbase.client.Result)62 Connection (org.apache.hadoop.hbase.client.Connection)57 Scan (org.apache.hadoop.hbase.client.Scan)51 Cell (org.apache.hadoop.hbase.Cell)44 Delete (org.apache.hadoop.hbase.client.Delete)44 HRegion (org.apache.hadoop.hbase.regionserver.HRegion)43