
Example 31 with HTableDescriptor

Use of org.apache.hadoop.hbase.HTableDescriptor in project hbase by apache.

From class TestAdmin1, method testDeleteEditUnknownColumnFamilyAndOrTable.

@Test(timeout = 300000)
public void testDeleteEditUnknownColumnFamilyAndOrTable() throws IOException {
    // Test that we get an exception if we try to do admin operations against a nonexistent table.
    final TableName nonexistentTable = TableName.valueOf("nonexistent");
    final byte[] nonexistentColumn = Bytes.toBytes("nonexistent");
    HColumnDescriptor nonexistentHcd = new HColumnDescriptor(nonexistentColumn);
    Exception exception = null;
    try {
        this.admin.addColumnFamily(nonexistentTable, nonexistentHcd);
    } catch (IOException e) {
        exception = e;
    }
    assertTrue(exception instanceof TableNotFoundException);
    exception = null;
    try {
        this.admin.deleteTable(nonexistentTable);
    } catch (IOException e) {
        exception = e;
    }
    assertTrue(exception instanceof TableNotFoundException);
    exception = null;
    try {
        this.admin.deleteColumnFamily(nonexistentTable, nonexistentColumn);
    } catch (IOException e) {
        exception = e;
    }
    assertTrue(exception instanceof TableNotFoundException);
    exception = null;
    try {
        this.admin.disableTable(nonexistentTable);
    } catch (IOException e) {
        exception = e;
    }
    assertTrue(exception instanceof TableNotFoundException);
    exception = null;
    try {
        this.admin.enableTable(nonexistentTable);
    } catch (IOException e) {
        exception = e;
    }
    assertTrue(exception instanceof TableNotFoundException);
    exception = null;
    try {
        this.admin.modifyColumnFamily(nonexistentTable, nonexistentHcd);
    } catch (IOException e) {
        exception = e;
    }
    assertTrue(exception instanceof TableNotFoundException);
    exception = null;
    try {
        HTableDescriptor htd = new HTableDescriptor(nonexistentTable);
        htd.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY));
        this.admin.modifyTable(htd.getTableName(), htd);
    } catch (IOException e) {
        exception = e;
    }
    assertTrue(exception instanceof TableNotFoundException);
    // Now make it so at least the table exists and then do tests against a
    // nonexistent column family -- see if we get right exceptions.
    final TableName tableName = TableName.valueOf(name.getMethodName() + System.currentTimeMillis());
    HTableDescriptor htd = new HTableDescriptor(tableName);
    htd.addFamily(new HColumnDescriptor("cf"));
    this.admin.createTable(htd);
    try {
        exception = null;
        try {
            this.admin.deleteColumnFamily(htd.getTableName(), nonexistentHcd.getName());
        } catch (IOException e) {
            exception = e;
        }
        assertTrue("found=" + exception.getClass().getName(), exception instanceof InvalidFamilyOperationException);
        exception = null;
        try {
            this.admin.modifyColumnFamily(htd.getTableName(), nonexistentHcd);
        } catch (IOException e) {
            exception = e;
        }
        assertTrue("found=" + exception.getClass().getName(), exception instanceof InvalidFamilyOperationException);
    } finally {
        this.admin.disableTable(tableName);
        this.admin.deleteTable(tableName);
    }
}
Also used: TableName (org.apache.hadoop.hbase.TableName), TableNotFoundException (org.apache.hadoop.hbase.TableNotFoundException), HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor), InvalidFamilyOperationException (org.apache.hadoop.hbase.InvalidFamilyOperationException), IOException (java.io.IOException), MergeRegionException (org.apache.hadoop.hbase.exceptions.MergeRegionException), MasterNotRunningException (org.apache.hadoop.hbase.MasterNotRunningException), TableNotEnabledException (org.apache.hadoop.hbase.TableNotEnabledException), ZooKeeperConnectionException (org.apache.hadoop.hbase.ZooKeeperConnectionException), TableNotDisabledException (org.apache.hadoop.hbase.TableNotDisabledException), HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor), Test (org.junit.Test)
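The test above deliberately drives every Admin call into the TableNotFoundException path. Outside a test, the same API can be probed first so the exception never fires. The snippet below is a hypothetical guard, not part of the HBase test; it assumes the same imports and admin field as the method above and uses Admin.tableExists plus calls already shown in these examples.

// Hypothetical guard (not from the HBase source): check for the table before
// touching it, and create it with a single "cf" family if it is missing.
if (!this.admin.tableExists(nonexistentTable)) {
    HTableDescriptor guardHtd = new HTableDescriptor(nonexistentTable);
    guardHtd.addFamily(new HColumnDescriptor("cf"));
    this.admin.createTable(guardHtd);
}
// From here on, addColumnFamily, modifyTable and the other admin calls no
// longer throw TableNotFoundException for this table name.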

Example 32 with HTableDescriptor

Use of org.apache.hadoop.hbase.HTableDescriptor in project hbase by apache.

From class TestAdmin1, method testCreateTableNumberOfRegions.

@Test(timeout = 300000)
public void testCreateTableNumberOfRegions() throws IOException, InterruptedException {
    final TableName tableName = TableName.valueOf(name.getMethodName());
    HTableDescriptor desc = new HTableDescriptor(tableName);
    desc.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY));
    admin.createTable(desc);
    List<HRegionLocation> regions;
    try (RegionLocator l = TEST_UTIL.getConnection().getRegionLocator(tableName)) {
        regions = l.getAllRegionLocations();
        assertEquals("Table should have only 1 region", 1, regions.size());
    }
    TableName TABLE_2 = TableName.valueOf(tableName.getNameAsString() + "_2");
    desc = new HTableDescriptor(TABLE_2);
    desc.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY));
    admin.createTable(desc, new byte[][] { new byte[] { 42 } });
    try (RegionLocator l = TEST_UTIL.getConnection().getRegionLocator(TABLE_2)) {
        regions = l.getAllRegionLocations();
        assertEquals("Table should have only 2 region", 2, regions.size());
    }
    TableName TABLE_3 = TableName.valueOf(tableName.getNameAsString() + "_3");
    desc = new HTableDescriptor(TABLE_3);
    desc.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY));
    admin.createTable(desc, "a".getBytes(), "z".getBytes(), 3);
    try (RegionLocator l = TEST_UTIL.getConnection().getRegionLocator(TABLE_3)) {
        regions = l.getAllRegionLocations();
        assertEquals("Table should have only 3 region", 3, regions.size());
    }
    TableName TABLE_4 = TableName.valueOf(tableName.getNameAsString() + "_4");
    desc = new HTableDescriptor(TABLE_4);
    desc.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY));
    try {
        admin.createTable(desc, "a".getBytes(), "z".getBytes(), 2);
        fail("Should not be able to create a table with only 2 regions using this API.");
    } catch (IllegalArgumentException eae) {
    // Expected
    }
    TableName TABLE_5 = TableName.valueOf(tableName.getNameAsString() + "_5");
    desc = new HTableDescriptor(TABLE_5);
    desc.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY));
    admin.createTable(desc, new byte[] { 1 }, new byte[] { 127 }, 16);
    try (RegionLocator l = TEST_UTIL.getConnection().getRegionLocator(TABLE_5)) {
        regions = l.getAllRegionLocations();
        assertEquals("Table should have 16 region", 16, regions.size());
    }
}
Also used: TableName (org.apache.hadoop.hbase.TableName), HRegionLocation (org.apache.hadoop.hbase.HRegionLocation), HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor), HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor), Test (org.junit.Test)
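The test above shows the region-count arithmetic: createTable(desc, splitKeys) produces splitKeys.length + 1 regions, while createTable(desc, startKey, endKey, numRegions) treats the start and end keys themselves as split points, which is why the two-region attempt fails with IllegalArgumentException. A minimal sketch of computing the split keys explicitly, assuming the same admin field and imports as the test; the table name is made up and Bytes.split is the HBase utility for evenly spaced key ranges.

// A minimal sketch (hypothetical): build evenly spaced split keys with
// Bytes.split and pass them directly. n split keys always yield n + 1 regions.
byte[][] splits = Bytes.split(new byte[] { 1 }, new byte[] { 127 }, 3);
HTableDescriptor splitDesc = new HTableDescriptor(TableName.valueOf("explicit_splits"));
splitDesc.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY));
// splits has 5 entries (the two endpoints plus 3 interior keys), so this
// creates 6 regions: one below the low endpoint, one above the high one,
// and four in between.
admin.createTable(splitDesc, splits);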

Example 33 with HTableDescriptor

Use of org.apache.hadoop.hbase.HTableDescriptor in project hbase by apache.

From class TestAdmin2, method testGetTableRegions.

/**
   * For HBASE-2556
   * @throws IOException
   */
@Test(timeout = 300000)
public void testGetTableRegions() throws IOException {
    final TableName tableName = TableName.valueOf(name.getMethodName());
    int expectedRegions = 10;
    // Use 80 bit numbers to make sure we aren't limited
    byte[] startKey = { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 };
    byte[] endKey = { 9, 9, 9, 9, 9, 9, 9, 9, 9, 9 };
    HTableDescriptor desc = new HTableDescriptor(tableName);
    desc.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY));
    admin.createTable(desc, startKey, endKey, expectedRegions);
    List<HRegionInfo> regionInfos = admin.getTableRegions(tableName);
    assertEquals("Tried to create " + expectedRegions + " regions but only found " + regionInfos.size(), expectedRegions, regionInfos.size());
}
Also used: HRegionInfo (org.apache.hadoop.hbase.HRegionInfo), TableName (org.apache.hadoop.hbase.TableName), HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor), HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor), Test (org.junit.Test)
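If counting regions is not enough, the boundaries themselves can be inspected. The loop below is a hypothetical follow-up, assuming the same admin and tableName as the test above; HRegionInfo exposes getStartKey() and getEndKey(), which are empty byte arrays for the first and last region.

// A minimal sketch (not part of the test): print each region's key range.
for (HRegionInfo info : admin.getTableRegions(tableName)) {
    System.out.println(Bytes.toStringBinary(info.getStartKey())
        + " -> " + Bytes.toStringBinary(info.getEndKey()));
}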

Example 34 with HTableDescriptor

Use of org.apache.hadoop.hbase.HTableDescriptor in project hbase by apache.

From class TestAdmin2, method createTable.

private HBaseAdmin createTable(TableName tableName) throws IOException {
    HBaseAdmin admin = TEST_UTIL.getHBaseAdmin();
    HTableDescriptor htd = new HTableDescriptor(tableName);
    HColumnDescriptor hcd = new HColumnDescriptor("value");
    htd.addFamily(hcd);
    admin.createTable(htd, null);
    return admin;
}
Also used: HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor), HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor)
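The helper leaves the table enabled and hands back the HBaseAdmin it used, so callers are expected to clean up themselves. A hedged usage sketch, assuming a test context like the other TestAdmin2 methods; the table name is hypothetical and the cleanup mirrors the disable/delete pattern used in Example 31.

// Hypothetical caller (not from TestAdmin2): create, use, and tear down.
TableName demoName = TableName.valueOf("createTable_helper_demo");
HBaseAdmin demoAdmin = createTable(demoName);
try {
    // ... exercise the table through the same connection ...
} finally {
    demoAdmin.disableTable(demoName);
    demoAdmin.deleteTable(demoName);
}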

Example 35 with HTableDescriptor

Use of org.apache.hadoop.hbase.HTableDescriptor in project hbase by apache.

From class TestAsyncRegionAdminApi, method createTableWithDefaultConf.

private void createTableWithDefaultConf(TableName TABLENAME) throws Exception {
    HTableDescriptor htd = new HTableDescriptor(TABLENAME);
    HColumnDescriptor hcd = new HColumnDescriptor("value");
    htd.addFamily(hcd);
    admin.createTable(htd, null).get();
}
Also used: HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor), HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor)
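Here admin is the asynchronous admin client, so createTable returns a future and get() blocks until the table exists. Assuming the returned type is a CompletableFuture, as the non-blocking HBase admin API returns, the call can also be chained instead of blocked on; the follow-up action below is hypothetical.

// A minimal sketch, assuming createTable returns a CompletableFuture:
// chain a follow-up action rather than blocking the caller on get().
admin.createTable(htd, null)
    .thenRun(() -> System.out.println("table " + TABLENAME + " is now available"))
    .join();  // join() waits like get() but rethrows failures unchecked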

Aggregations

HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor): 867 uses
HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor): 555 uses
Test (org.junit.Test): 425 uses
TableName (org.apache.hadoop.hbase.TableName): 258 uses
HRegionInfo (org.apache.hadoop.hbase.HRegionInfo): 171 uses
IOException (java.io.IOException): 167 uses
Put (org.apache.hadoop.hbase.client.Put): 149 uses
Table (org.apache.hadoop.hbase.client.Table): 134 uses
Path (org.apache.hadoop.fs.Path): 127 uses
Admin (org.apache.hadoop.hbase.client.Admin): 121 uses
Configuration (org.apache.hadoop.conf.Configuration): 87 uses
HBaseAdmin (org.apache.hadoop.hbase.client.HBaseAdmin): 77 uses
ArrayList (java.util.ArrayList): 75 uses
FileSystem (org.apache.hadoop.fs.FileSystem): 66 uses
Result (org.apache.hadoop.hbase.client.Result): 62 uses
Connection (org.apache.hadoop.hbase.client.Connection): 57 uses
Scan (org.apache.hadoop.hbase.client.Scan): 51 uses
Cell (org.apache.hadoop.hbase.Cell): 44 uses
Delete (org.apache.hadoop.hbase.client.Delete): 44 uses
HRegion (org.apache.hadoop.hbase.regionserver.HRegion): 43 uses
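Read together, the counts above describe the most common pattern in these sources: an HTableDescriptor with one or more HColumnDescriptors handed to an Admin, followed by Put and Scan traffic through a Table obtained from a Connection. The sketch below strings those pieces together in the same pre-2.0-style client API the examples use; it is a minimal, hypothetical example (table, family, and row names are made up), not code from the HBase project.

// A minimal, hypothetical end-to-end sketch using the classes aggregated above.
Configuration conf = HBaseConfiguration.create();
try (Connection connection = ConnectionFactory.createConnection(conf);
     Admin admin = connection.getAdmin()) {
    TableName tableName = TableName.valueOf("demo");
    HTableDescriptor htd = new HTableDescriptor(tableName);
    htd.addFamily(new HColumnDescriptor("cf"));
    if (!admin.tableExists(tableName)) {
        admin.createTable(htd);
    }
    try (Table table = connection.getTable(tableName)) {
        // Write one cell, then scan it back.
        Put put = new Put(Bytes.toBytes("row1"));
        put.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"), Bytes.toBytes("value"));
        table.put(put);
        try (ResultScanner scanner = table.getScanner(new Scan())) {
            for (Result result : scanner) {
                System.out.println(Bytes.toStringBinary(result.getRow()));
            }
        }
    }
}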