Search in sources:

Example 71 with TableName

Use of org.apache.hadoop.hbase.TableName in project hbase (by Apache).

From class TestAdmin1, method testTableAvailableWithRandomSplitKeys.

/**
 * Verifies that isTableAvailable(table, splitKeys) reports the table as NOT
 * available when the table was created without those split keys: the table
 * ends up with a single region row in META, so the regions implied by the
 * split keys do not exist.
 */
@Test(timeout = 300000)
public void testTableAvailableWithRandomSplitKeys() throws Exception {
    final TableName tableName = TableName.valueOf(name.getMethodName());
    HTableDescriptor desc = new HTableDescriptor(tableName);
    desc.addFamily(new HColumnDescriptor("col"));
    // Fix: declare and initialize in one step. The original code created a
    // throwaway "new byte[1][]" array that was immediately overwritten.
    byte[][] splitKeys = { new byte[] { 1, 1, 1 }, new byte[] { 2, 2, 2 } };
    // Deliberately create the table WITHOUT the split keys.
    admin.createTable(desc);
    boolean tableAvailable = admin.isTableAvailable(tableName, splitKeys);
    assertFalse("Table should be created with 1 row in META", tableAvailable);
}
Also used : TableName(org.apache.hadoop.hbase.TableName) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) Test(org.junit.Test)

Example 72 with TableName

use of org.apache.hadoop.hbase.TableName in project hbase by apache.

From class TestAdmin1, method testOnlineChangeTableSchema.

/**
 * Verifies that schema modifications — both table-level (flush size, custom
 * key/value) and column-family-level (max versions, add family, delete
 * family) — can be applied while the table remains ENABLED, and that each
 * change is visible when the descriptor is re-read from the master.
 *
 * @throws IOException if an admin operation fails
 * @throws InterruptedException if table creation is interrupted
 */
@Test(timeout = 300000)
public void testOnlineChangeTableSchema() throws IOException, InterruptedException {
    final TableName tableName = TableName.valueOf(name.getMethodName());
    HTableDescriptor[] tables = admin.listTables();
    int numTables = tables.length;
    TEST_UTIL.createTable(tableName, HConstants.CATALOG_FAMILY).close();
    tables = this.admin.listTables();
    // Creating the table should have grown the table count by exactly one.
    assertEquals(numTables + 1, tables.length);
    // FIRST, do htabledescriptor changes.
    HTableDescriptor htd = this.admin.getTableDescriptor(tableName);
    // Make a copy and assert copy is good.
    HTableDescriptor copy = new HTableDescriptor(htd);
    assertTrue(htd.equals(copy));
    // Now amend the copy. Introduce differences.
    long newFlushSize = htd.getMemStoreFlushSize() / 2;
    if (newFlushSize <= 0) {
        // Guard against an unset/zero flush size; fall back to half the default.
        newFlushSize = HTableDescriptor.DEFAULT_MEMSTORE_FLUSH_SIZE / 2;
    }
    copy.setMemStoreFlushSize(newFlushSize);
    final String key = "anyoldkey";
    assertTrue(htd.getValue(key) == null);
    copy.setValue(key, key);
    // modifyTable on an ENABLED table must succeed: no TableNotDisabledException.
    boolean expectedException = false;
    try {
        admin.modifyTable(tableName, copy);
    } catch (TableNotDisabledException re) {
        expectedException = true;
    }
    assertFalse(expectedException);
    // Re-read the descriptor from the master and confirm both amendments took.
    HTableDescriptor modifiedHtd = this.admin.getTableDescriptor(tableName);
    assertFalse(htd.equals(modifiedHtd));
    assertTrue(copy.equals(modifiedHtd));
    assertEquals(newFlushSize, modifiedHtd.getMemStoreFlushSize());
    assertEquals(key, modifiedHtd.getValue(key));
    // Now work on column family changes.
    int countOfFamilies = modifiedHtd.getFamilies().size();
    assertTrue(countOfFamilies > 0);
    HColumnDescriptor hcd = modifiedHtd.getFamilies().iterator().next();
    int maxversions = hcd.getMaxVersions();
    final int newMaxVersions = maxversions + 1;
    hcd.setMaxVersions(newMaxVersions);
    final byte[] hcdName = hcd.getName();
    // Modifying a column family on an enabled table must also succeed.
    expectedException = false;
    try {
        this.admin.modifyColumnFamily(tableName, hcd);
    } catch (TableNotDisabledException re) {
        expectedException = true;
    }
    assertFalse(expectedException);
    // Confirm the max-versions bump is visible on a fresh descriptor read.
    modifiedHtd = this.admin.getTableDescriptor(tableName);
    HColumnDescriptor modifiedHcd = modifiedHtd.getFamily(hcdName);
    assertEquals(newMaxVersions, modifiedHcd.getMaxVersions());
    // Try adding a column
    assertFalse(this.admin.isTableDisabled(tableName));
    final String xtracolName = "xtracol";
    HColumnDescriptor xtracol = new HColumnDescriptor(xtracolName);
    xtracol.setValue(xtracolName, xtracolName);
    expectedException = false;
    try {
        this.admin.addColumnFamily(tableName, xtracol);
    } catch (TableNotDisabledException re) {
        expectedException = true;
    }
    // Add column should work even if the table is enabled
    assertFalse(expectedException);
    // The new family and its custom value must appear on a fresh read.
    modifiedHtd = this.admin.getTableDescriptor(tableName);
    hcd = modifiedHtd.getFamily(xtracol.getName());
    assertTrue(hcd != null);
    assertTrue(hcd.getValue(xtracolName).equals(xtracolName));
    // Delete the just-added column.
    this.admin.deleteColumnFamily(tableName, xtracol.getName());
    modifiedHtd = this.admin.getTableDescriptor(tableName);
    hcd = modifiedHtd.getFamily(xtracol.getName());
    // The deleted family must no longer appear in the descriptor.
    assertTrue(hcd == null);
    // Delete the table
    this.admin.disableTable(tableName);
    this.admin.deleteTable(tableName);
    this.admin.listTables();
    assertFalse(this.admin.tableExists(tableName));
}
Also used : TableNotDisabledException(org.apache.hadoop.hbase.TableNotDisabledException) TableName(org.apache.hadoop.hbase.TableName) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) Test(org.junit.Test)

Example 73 with TableName

use of org.apache.hadoop.hbase.TableName in project hbase by apache.

From class TestAdmin1, method testEnableTableRetainAssignment.

/**
 * Verifies that enabling a previously disabled table retains the region
 * assignments: the region locations observed before the disable must all be
 * present again after the table is re-enabled.
 *
 * @throws IOException if any admin or locator operation fails
 */
@Test(timeout = 300000)
public void testEnableTableRetainAssignment() throws IOException {
    final TableName tableName = TableName.valueOf(name.getMethodName());
    byte[][] splits = { new byte[] { 1, 1, 1 }, new byte[] { 2, 2, 2 }, new byte[] { 3, 3, 3 },
            new byte[] { 4, 4, 4 }, new byte[] { 5, 5, 5 }, new byte[] { 6, 6, 6 },
            new byte[] { 7, 7, 7 }, new byte[] { 8, 8, 8 }, new byte[] { 9, 9, 9 } };
    int expectedRegions = splits.length + 1;
    HTableDescriptor tableDescriptor = new HTableDescriptor(tableName);
    tableDescriptor.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY));
    admin.createTable(tableDescriptor, splits);
    try (RegionLocator locator = TEST_UTIL.getConnection().getRegionLocator(tableName)) {
        // Snapshot the assignment right after creation.
        List<HRegionLocation> beforeDisable = locator.getAllRegionLocations();
        assertEquals("Tried to create " + expectedRegions + " regions " + "but only found " + beforeDisable.size(), expectedRegions, beforeDisable.size());
        // Bounce the table: disable it, then enable it again.
        admin.disableTable(tableName);
        admin.enableTable(tableName);
        List<HRegionLocation> afterEnable = locator.getAllRegionLocations();
        // Every original region location must survive the bounce unchanged.
        assertEquals(beforeDisable.size(), afterEnable.size());
        assertTrue(afterEnable.containsAll(beforeDisable));
    }
}
Also used : TableName(org.apache.hadoop.hbase.TableName) HRegionLocation(org.apache.hadoop.hbase.HRegionLocation) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) Test(org.junit.Test)

Example 74 with TableName

use of org.apache.hadoop.hbase.TableName in project hbase by apache.

From class TestAdmin1, method testDeleteEditUnknownColumnFamilyAndOrTable.

/**
 * Admin call that may throw IOException. Lets the expect-exception
 * boilerplate in testDeleteEditUnknownColumnFamilyAndOrTable be written
 * once instead of nine times.
 */
@FunctionalInterface
private interface AdminAction {
    void call() throws IOException;
}

/**
 * Runs {@code action} and returns the IOException it threw, or null when the
 * action completed without throwing.
 */
private static IOException runCatchingIOException(AdminAction action) {
    try {
        action.call();
        return null;
    } catch (IOException e) {
        return e;
    }
}

/**
 * Verifies the exceptions raised for operations on missing targets: every
 * admin operation against a nonexistent table must fail with
 * TableNotFoundException, and column-family operations against a nonexistent
 * family of an existing table must fail with InvalidFamilyOperationException.
 */
@Test(timeout = 300000)
public void testDeleteEditUnknownColumnFamilyAndOrTable() throws IOException {
    // Test we get exception if we try to
    final TableName nonexistentTable = TableName.valueOf("nonexistent");
    final byte[] nonexistentColumn = Bytes.toBytes("nonexistent");
    final HColumnDescriptor nonexistentHcd = new HColumnDescriptor(nonexistentColumn);
    Exception exception = runCatchingIOException(() -> this.admin.addColumnFamily(nonexistentTable, nonexistentHcd));
    assertTrue(exception instanceof TableNotFoundException);
    exception = runCatchingIOException(() -> this.admin.deleteTable(nonexistentTable));
    assertTrue(exception instanceof TableNotFoundException);
    exception = runCatchingIOException(() -> this.admin.deleteColumnFamily(nonexistentTable, nonexistentColumn));
    assertTrue(exception instanceof TableNotFoundException);
    exception = runCatchingIOException(() -> this.admin.disableTable(nonexistentTable));
    assertTrue(exception instanceof TableNotFoundException);
    exception = runCatchingIOException(() -> this.admin.enableTable(nonexistentTable));
    assertTrue(exception instanceof TableNotFoundException);
    exception = runCatchingIOException(() -> this.admin.modifyColumnFamily(nonexistentTable, nonexistentHcd));
    assertTrue(exception instanceof TableNotFoundException);
    exception = runCatchingIOException(() -> {
        HTableDescriptor nonexistentHtd = new HTableDescriptor(nonexistentTable);
        nonexistentHtd.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY));
        this.admin.modifyTable(nonexistentHtd.getTableName(), nonexistentHtd);
    });
    assertTrue(exception instanceof TableNotFoundException);
    // Now make it so at least the table exists and then do tests against a
    // nonexistent column family -- see if we get right exceptions.
    final TableName tableName = TableName.valueOf(name.getMethodName() + System.currentTimeMillis());
    final HTableDescriptor htd = new HTableDescriptor(tableName);
    htd.addFamily(new HColumnDescriptor("cf"));
    this.admin.createTable(htd);
    try {
        exception = runCatchingIOException(() -> this.admin.deleteColumnFamily(htd.getTableName(), nonexistentHcd.getName()));
        assertTrue("found=" + exception.getClass().getName(), exception instanceof InvalidFamilyOperationException);
        exception = runCatchingIOException(() -> this.admin.modifyColumnFamily(htd.getTableName(), nonexistentHcd));
        assertTrue("found=" + exception.getClass().getName(), exception instanceof InvalidFamilyOperationException);
    } finally {
        // Always clean up the table we created, even if an assertion failed.
        this.admin.disableTable(tableName);
        this.admin.deleteTable(tableName);
    }
}
Also used : TableName(org.apache.hadoop.hbase.TableName) TableNotFoundException(org.apache.hadoop.hbase.TableNotFoundException) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) InvalidFamilyOperationException(org.apache.hadoop.hbase.InvalidFamilyOperationException) IOException(java.io.IOException) TableNotFoundException(org.apache.hadoop.hbase.TableNotFoundException) InvalidFamilyOperationException(org.apache.hadoop.hbase.InvalidFamilyOperationException) MergeRegionException(org.apache.hadoop.hbase.exceptions.MergeRegionException) MasterNotRunningException(org.apache.hadoop.hbase.MasterNotRunningException) TableNotEnabledException(org.apache.hadoop.hbase.TableNotEnabledException) ZooKeeperConnectionException(org.apache.hadoop.hbase.ZooKeeperConnectionException) IOException(java.io.IOException) TableNotDisabledException(org.apache.hadoop.hbase.TableNotDisabledException) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) Test(org.junit.Test)

Example 75 with TableName

use of org.apache.hadoop.hbase.TableName in project hbase by apache.

From class TestAdmin1, method testCreateTableNumberOfRegions.

/**
 * Verifies the region count produced by each createTable() variant:
 * no split keys yields 1 region; N explicit split keys yield N + 1 regions;
 * the start-key/end-key/numRegions API yields exactly numRegions; and that
 * same API rejects a request for fewer than 3 regions.
 */
@Test(timeout = 300000)
public void testCreateTableNumberOfRegions() throws IOException, InterruptedException {
    final TableName tableName = TableName.valueOf(name.getMethodName());
    HTableDescriptor desc = new HTableDescriptor(tableName);
    desc.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY));
    admin.createTable(desc);
    List<HRegionLocation> regions;
    try (RegionLocator l = TEST_UTIL.getConnection().getRegionLocator(tableName)) {
        regions = l.getAllRegionLocations();
        assertEquals("Table should have only 1 region", 1, regions.size());
    }
    // One split key => two regions.
    TableName table2 = TableName.valueOf(tableName.getNameAsString() + "_2");
    desc = new HTableDescriptor(table2);
    desc.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY));
    admin.createTable(desc, new byte[][] { new byte[] { 42 } });
    try (RegionLocator l = TEST_UTIL.getConnection().getRegionLocator(table2)) {
        regions = l.getAllRegionLocations();
        assertEquals("Table should have only 2 region", 2, regions.size());
    }
    // Start/end key plus an explicit region count.
    TableName table3 = TableName.valueOf(tableName.getNameAsString() + "_3");
    desc = new HTableDescriptor(table3);
    desc.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY));
    // Fix: Bytes.toBytes() instead of String.getBytes(), which depends on the
    // platform default charset; this also matches usage elsewhere in the class.
    admin.createTable(desc, Bytes.toBytes("a"), Bytes.toBytes("z"), 3);
    try (RegionLocator l = TEST_UTIL.getConnection().getRegionLocator(table3)) {
        regions = l.getAllRegionLocations();
        assertEquals("Table should have only 3 region", 3, regions.size());
    }
    // Fewer than 3 regions via this API must be rejected.
    TableName table4 = TableName.valueOf(tableName.getNameAsString() + "_4");
    desc = new HTableDescriptor(table4);
    desc.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY));
    try {
        admin.createTable(desc, Bytes.toBytes("a"), Bytes.toBytes("z"), 2);
        fail("Should not be able to create a table with only 2 regions using this API.");
    } catch (IllegalArgumentException eae) {
    // Expected
    }
    // A larger explicit region count must be honored exactly.
    TableName table5 = TableName.valueOf(tableName.getNameAsString() + "_5");
    desc = new HTableDescriptor(table5);
    desc.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY));
    admin.createTable(desc, new byte[] { 1 }, new byte[] { 127 }, 16);
    try (RegionLocator l = TEST_UTIL.getConnection().getRegionLocator(table5)) {
        regions = l.getAllRegionLocations();
        assertEquals("Table should have 16 region", 16, regions.size());
    }
}
Also used : TableName(org.apache.hadoop.hbase.TableName) HRegionLocation(org.apache.hadoop.hbase.HRegionLocation) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) Test(org.junit.Test)

Aggregations

TableName (org.apache.hadoop.hbase.TableName)1033 Test (org.junit.Test)695 HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor)257 Table (org.apache.hadoop.hbase.client.Table)228 IOException (java.io.IOException)225 HRegionInfo (org.apache.hadoop.hbase.HRegionInfo)215 HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor)203 Result (org.apache.hadoop.hbase.client.Result)125 ArrayList (java.util.ArrayList)120 Put (org.apache.hadoop.hbase.client.Put)118 Path (org.apache.hadoop.fs.Path)113 Connection (org.apache.hadoop.hbase.client.Connection)103 Scan (org.apache.hadoop.hbase.client.Scan)98 ResultScanner (org.apache.hadoop.hbase.client.ResultScanner)89 ServerName (org.apache.hadoop.hbase.ServerName)85 Admin (org.apache.hadoop.hbase.client.Admin)85 Cell (org.apache.hadoop.hbase.Cell)77 HashMap (java.util.HashMap)75 Delete (org.apache.hadoop.hbase.client.Delete)66 InterruptedIOException (java.io.InterruptedIOException)63