Example 26 with HColumnDescriptor

Use of org.apache.hadoop.hbase.HColumnDescriptor in project hbase by apache.

From the class TestAdmin1, method testEnableTableRetainAssignment.

/**
   * Test retain assignment on enableTable.
   *
   * @throws IOException
   */
@Test(timeout = 300000)
public void testEnableTableRetainAssignment() throws IOException {
    final TableName tableName = TableName.valueOf(name.getMethodName());
    byte[][] splitKeys = { new byte[] { 1, 1, 1 }, new byte[] { 2, 2, 2 }, new byte[] { 3, 3, 3 }, new byte[] { 4, 4, 4 }, new byte[] { 5, 5, 5 }, new byte[] { 6, 6, 6 }, new byte[] { 7, 7, 7 }, new byte[] { 8, 8, 8 }, new byte[] { 9, 9, 9 } };
    int expectedRegions = splitKeys.length + 1;
    HTableDescriptor desc = new HTableDescriptor(tableName);
    desc.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY));
    admin.createTable(desc, splitKeys);
    try (RegionLocator l = TEST_UTIL.getConnection().getRegionLocator(tableName)) {
        List<HRegionLocation> regions = l.getAllRegionLocations();
        assertEquals("Tried to create " + expectedRegions + " regions " + "but only found " + regions.size(), expectedRegions, regions.size());
        // Disable table.
        admin.disableTable(tableName);
        // Enable table, use retain assignment to assign regions.
        admin.enableTable(tableName);
        List<HRegionLocation> regions2 = l.getAllRegionLocations();
        // Check the assignment.
        assertEquals(regions.size(), regions2.size());
        assertTrue(regions2.containsAll(regions));
    }
}
Also used : TableName(org.apache.hadoop.hbase.TableName) HRegionLocation(org.apache.hadoop.hbase.HRegionLocation) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) Test(org.junit.Test)
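
A note on configuring the family: the test above only needs the defaults, so it hands HConstants.CATALOG_FAMILY straight to the HColumnDescriptor constructor. As a minimal sketch of the wider descriptor API (the family name "cf" and the concrete values are assumptions, not taken from TestAdmin1), a family is usually tuned before admin.createTable is called:

import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.util.Bytes;

public class FamilyDescriptorSketch {

    public static HTableDescriptor describe(TableName tableName) {
        // Illustrative settings only; pick values that match the workload.
        HColumnDescriptor family = new HColumnDescriptor(Bytes.toBytes("cf"));
        // Keep up to three versions of each cell.
        family.setMaxVersions(3);
        // Expire cells after one day (the value is in seconds).
        family.setTimeToLive(86400);
        // Favor the in-memory tier of the block cache for this family.
        family.setInMemory(true);
        HTableDescriptor desc = new HTableDescriptor(tableName);
        desc.addFamily(family);
        return desc;
    }
}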

Example 27 with HColumnDescriptor

Use of org.apache.hadoop.hbase.HColumnDescriptor in project hbase by apache.

From the class TestAdmin1, method testDeleteEditUnknownColumnFamilyAndOrTable.

@Test(timeout = 300000)
public void testDeleteEditUnknownColumnFamilyAndOrTable() throws IOException {
    // Test we get exception if we try to
    final TableName nonexistentTable = TableName.valueOf("nonexistent");
    final byte[] nonexistentColumn = Bytes.toBytes("nonexistent");
    HColumnDescriptor nonexistentHcd = new HColumnDescriptor(nonexistentColumn);
    Exception exception = null;
    try {
        this.admin.addColumnFamily(nonexistentTable, nonexistentHcd);
    } catch (IOException e) {
        exception = e;
    }
    assertTrue(exception instanceof TableNotFoundException);
    exception = null;
    try {
        this.admin.deleteTable(nonexistentTable);
    } catch (IOException e) {
        exception = e;
    }
    assertTrue(exception instanceof TableNotFoundException);
    exception = null;
    try {
        this.admin.deleteColumnFamily(nonexistentTable, nonexistentColumn);
    } catch (IOException e) {
        exception = e;
    }
    assertTrue(exception instanceof TableNotFoundException);
    exception = null;
    try {
        this.admin.disableTable(nonexistentTable);
    } catch (IOException e) {
        exception = e;
    }
    assertTrue(exception instanceof TableNotFoundException);
    exception = null;
    try {
        this.admin.enableTable(nonexistentTable);
    } catch (IOException e) {
        exception = e;
    }
    assertTrue(exception instanceof TableNotFoundException);
    exception = null;
    try {
        this.admin.modifyColumnFamily(nonexistentTable, nonexistentHcd);
    } catch (IOException e) {
        exception = e;
    }
    assertTrue(exception instanceof TableNotFoundException);
    exception = null;
    try {
        HTableDescriptor htd = new HTableDescriptor(nonexistentTable);
        htd.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY));
        this.admin.modifyTable(htd.getTableName(), htd);
    } catch (IOException e) {
        exception = e;
    }
    assertTrue(exception instanceof TableNotFoundException);
    // Now make it so at least the table exists and then do tests against a
    // nonexistent column family -- see if we get right exceptions.
    final TableName tableName = TableName.valueOf(name.getMethodName() + System.currentTimeMillis());
    HTableDescriptor htd = new HTableDescriptor(tableName);
    htd.addFamily(new HColumnDescriptor("cf"));
    this.admin.createTable(htd);
    try {
        exception = null;
        try {
            this.admin.deleteColumnFamily(htd.getTableName(), nonexistentHcd.getName());
        } catch (IOException e) {
            exception = e;
        }
        assertTrue("found=" + exception.getClass().getName(), exception instanceof InvalidFamilyOperationException);
        exception = null;
        try {
            this.admin.modifyColumnFamily(htd.getTableName(), nonexistentHcd);
        } catch (IOException e) {
            exception = e;
        }
        assertTrue("found=" + exception.getClass().getName(), exception instanceof InvalidFamilyOperationException);
    } finally {
        this.admin.disableTable(tableName);
        this.admin.deleteTable(tableName);
    }
}
Also used : TableName(org.apache.hadoop.hbase.TableName) TableNotFoundException(org.apache.hadoop.hbase.TableNotFoundException) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) InvalidFamilyOperationException(org.apache.hadoop.hbase.InvalidFamilyOperationException) IOException(java.io.IOException) MergeRegionException(org.apache.hadoop.hbase.exceptions.MergeRegionException) MasterNotRunningException(org.apache.hadoop.hbase.MasterNotRunningException) TableNotEnabledException(org.apache.hadoop.hbase.TableNotEnabledException) ZooKeeperConnectionException(org.apache.hadoop.hbase.ZooKeeperConnectionException) TableNotDisabledException(org.apache.hadoop.hbase.TableNotDisabledException) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) Test(org.junit.Test)
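
The repeated try/catch/assertTrue blocks above can be collapsed with a small helper. The following is a hypothetical sketch (AdminCall and ExpectException are not part of TestAdmin1), shown only to make the expected-exception pattern reusable:

import java.io.IOException;
import static org.junit.Assert.assertTrue;

// Hypothetical functional interface for an admin call that may throw IOException.
interface AdminCall {
    void run() throws IOException;
}

final class ExpectException {

    // Runs the call and asserts that it fails with the expected exception type.
    static void expect(Class<? extends IOException> expected, AdminCall call) {
        Exception caught = null;
        try {
            call.run();
        } catch (IOException e) {
            caught = e;
        }
        assertTrue("expected " + expected.getName() + " but caught " + caught,
            expected.isInstance(caught));
    }
}

With that helper each check in the test shrinks to one line, for example ExpectException.expect(TableNotFoundException.class, () -> admin.deleteTable(nonexistentTable)).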

Example 28 with HColumnDescriptor

Use of org.apache.hadoop.hbase.HColumnDescriptor in project hbase by apache.

From the class TestAdmin1, method testCreateTableNumberOfRegions.

@Test(timeout = 300000)
public void testCreateTableNumberOfRegions() throws IOException, InterruptedException {
    final TableName tableName = TableName.valueOf(name.getMethodName());
    HTableDescriptor desc = new HTableDescriptor(tableName);
    desc.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY));
    admin.createTable(desc);
    List<HRegionLocation> regions;
    try (RegionLocator l = TEST_UTIL.getConnection().getRegionLocator(tableName)) {
        regions = l.getAllRegionLocations();
        assertEquals("Table should have only 1 region", 1, regions.size());
    }
    TableName TABLE_2 = TableName.valueOf(tableName.getNameAsString() + "_2");
    desc = new HTableDescriptor(TABLE_2);
    desc.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY));
    admin.createTable(desc, new byte[][] { new byte[] { 42 } });
    try (RegionLocator l = TEST_UTIL.getConnection().getRegionLocator(TABLE_2)) {
        regions = l.getAllRegionLocations();
        assertEquals("Table should have only 2 region", 2, regions.size());
    }
    TableName TABLE_3 = TableName.valueOf(tableName.getNameAsString() + "_3");
    desc = new HTableDescriptor(TABLE_3);
    desc.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY));
    admin.createTable(desc, "a".getBytes(), "z".getBytes(), 3);
    try (RegionLocator l = TEST_UTIL.getConnection().getRegionLocator(TABLE_3)) {
        regions = l.getAllRegionLocations();
        assertEquals("Table should have only 3 region", 3, regions.size());
    }
    TableName TABLE_4 = TableName.valueOf(tableName.getNameAsString() + "_4");
    desc = new HTableDescriptor(TABLE_4);
    desc.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY));
    try {
        admin.createTable(desc, "a".getBytes(), "z".getBytes(), 2);
        fail("Should not be able to create a table with only 2 regions using this API.");
    } catch (IllegalArgumentException eae) {
    // Expected
    }
    TableName TABLE_5 = TableName.valueOf(tableName.getNameAsString() + "_5");
    desc = new HTableDescriptor(TABLE_5);
    desc.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY));
    admin.createTable(desc, new byte[] { 1 }, new byte[] { 127 }, 16);
    try (RegionLocator l = TEST_UTIL.getConnection().getRegionLocator(TABLE_5)) {
        regions = l.getAllRegionLocations();
        assertEquals("Table should have 16 region", 16, regions.size());
    }
}
Also used : TableName(org.apache.hadoop.hbase.TableName) HRegionLocation(org.apache.hadoop.hbase.HRegionLocation) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) Test(org.junit.Test)
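
To see why each createTable overload yields the region counts asserted above, it can help to print the boundaries that the split keys produce. A minimal sketch, assuming an already open Connection (the class and method names are illustrative, not part of TestAdmin1):

import java.io.IOException;
import java.util.List;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.RegionLocator;
import org.apache.hadoop.hbase.util.Bytes;

public class RegionBoundarySketch {

    // Prints each region's start and end key so the split layout can be inspected.
    public static void printBoundaries(Connection connection, TableName tableName) throws IOException {
        try (RegionLocator locator = connection.getRegionLocator(tableName)) {
            List<HRegionLocation> locations = locator.getAllRegionLocations();
            for (HRegionLocation location : locations) {
                System.out.println(Bytes.toStringBinary(location.getRegionInfo().getStartKey())
                    + " -> " + Bytes.toStringBinary(location.getRegionInfo().getEndKey()));
            }
        }
    }
}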

Example 29 with HColumnDescriptor

Use of org.apache.hadoop.hbase.HColumnDescriptor in project hbase by apache.

From the class TestRemoteBackup, method testFullBackupRemote.

/**
   * Verify that a remote full backup is created on a single table with data correctly.
   * @throws Exception
   */
@Test
public void testFullBackupRemote() throws Exception {
    LOG.info("test remote full backup on a single table");
    final CountDownLatch latch = new CountDownLatch(1);
    final int NB_ROWS_IN_FAM3 = 6;
    final byte[] fam3Name = Bytes.toBytes("f3");
    final byte[] fam2Name = Bytes.toBytes("f2");
    final Connection conn = ConnectionFactory.createConnection(conf1);
    Thread t = new Thread() {

        @Override
        public void run() {
            try {
                latch.await();
            } catch (InterruptedException ie) {
                // Ignore the interruption and proceed to write the rows.
            }
            try {
                HTable t1 = (HTable) conn.getTable(table1);
                Put p1;
                for (int i = 0; i < NB_ROWS_IN_FAM3; i++) {
                    p1 = new Put(Bytes.toBytes("row-t1" + i));
                    p1.addColumn(fam3Name, qualName, Bytes.toBytes("val" + i));
                    t1.put(p1);
                }
                LOG.debug("Wrote " + NB_ROWS_IN_FAM3 + " rows into family3");
                t1.close();
            } catch (IOException ioe) {
                throw new RuntimeException(ioe);
            }
        }
    };
    t.start();
    table1Desc.addFamily(new HColumnDescriptor(fam3Name));
    // family 2 is MOB enabled
    HColumnDescriptor hcd = new HColumnDescriptor(fam2Name);
    hcd.setMobEnabled(true);
    hcd.setMobThreshold(0L);
    table1Desc.addFamily(hcd);
    HBaseTestingUtility.modifyTableSync(TEST_UTIL.getAdmin(), table1Desc);
    SnapshotTestingUtils.loadData(TEST_UTIL, table1, 50, fam2Name);
    HTable t1 = (HTable) conn.getTable(table1);
    int rows0 = MobSnapshotTestingUtils.countMobRows(t1, fam2Name);
    latch.countDown();
    String backupId = backupTables(BackupType.FULL, Lists.newArrayList(table1), BACKUP_REMOTE_ROOT_DIR);
    assertTrue(checkSucceeded(backupId));
    LOG.info("backup complete " + backupId);
    Assert.assertEquals(TEST_UTIL.countRows(t1, famName), NB_ROWS_IN_BATCH);
    t.join();
    Assert.assertEquals(TEST_UTIL.countRows(t1, fam3Name), NB_ROWS_IN_FAM3);
    t1.close();
    TableName[] tablesRestoreFull = new TableName[] { table1 };
    TableName[] tablesMapFull = new TableName[] { table1_restore };
    BackupAdmin client = getBackupAdmin();
    client.restore(BackupUtils.createRestoreRequest(BACKUP_REMOTE_ROOT_DIR, backupId, false, tablesRestoreFull, tablesMapFull, false));
    // check tables for full restore
    HBaseAdmin hAdmin = TEST_UTIL.getHBaseAdmin();
    assertTrue(hAdmin.tableExists(table1_restore));
    // #5.2 - checking row count of tables for full restore
    HTable hTable = (HTable) conn.getTable(table1_restore);
    Assert.assertEquals(TEST_UTIL.countRows(hTable, famName), NB_ROWS_IN_BATCH);
    int cnt3 = TEST_UTIL.countRows(hTable, fam3Name);
    Assert.assertTrue(cnt3 >= 0 && cnt3 <= NB_ROWS_IN_FAM3);
    int rows1 = MobSnapshotTestingUtils.countMobRows(t1, fam2Name);
    Assert.assertEquals(rows0, rows1);
    hTable.close();
    hAdmin.close();
}
Also used : HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) Connection(org.apache.hadoop.hbase.client.Connection) IOException(java.io.IOException) CountDownLatch(java.util.concurrent.CountDownLatch) HTable(org.apache.hadoop.hbase.client.HTable) Put(org.apache.hadoop.hbase.client.Put) TableName(org.apache.hadoop.hbase.TableName) HBaseAdmin(org.apache.hadoop.hbase.client.HBaseAdmin) Test(org.junit.Test)
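
Outside the backup test harness, enabling MOB on a family takes the same two calls used on fam2Name above. A minimal standalone sketch (the table name, family name, and the 100 KB threshold are assumptions; the test itself uses a threshold of 0L so every value becomes a MOB):

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.util.Bytes;

public class MobFamilySketch {

    public static void main(String[] args) throws Exception {
        try (Connection connection = ConnectionFactory.createConnection(HBaseConfiguration.create());
             Admin admin = connection.getAdmin()) {
            HColumnDescriptor mobFamily = new HColumnDescriptor(Bytes.toBytes("blobs"));
            // Values larger than the threshold are stored as MOB files instead of regular cells.
            mobFamily.setMobEnabled(true);
            // Threshold in bytes; illustrative value.
            mobFamily.setMobThreshold(100 * 1024L);
            HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("mob_table"));
            desc.addFamily(mobFamily);
            admin.createTable(desc);
        }
    }
}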

Example 30 with HColumnDescriptor

Use of org.apache.hadoop.hbase.HColumnDescriptor in project hbase by apache.

From the class TestAdmin2, method testGetTableRegions.

/**
   * For HBASE-2556
   * @throws IOException
   */
@Test(timeout = 300000)
public void testGetTableRegions() throws IOException {
    final TableName tableName = TableName.valueOf(name.getMethodName());
    int expectedRegions = 10;
    // Use 80 bit numbers to make sure we aren't limited
    byte[] startKey = { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 };
    byte[] endKey = { 9, 9, 9, 9, 9, 9, 9, 9, 9, 9 };
    HTableDescriptor desc = new HTableDescriptor(tableName);
    desc.addFamily(new HColumnDescriptor(HConstants.CATALOG_FAMILY));
    admin.createTable(desc, startKey, endKey, expectedRegions);
    List<HRegionInfo> regionInfos = admin.getTableRegions(tableName);
    assertEquals("Tried to create " + expectedRegions + " regions " + "but only found " + regionInfos.size(), expectedRegions, regionInfos.size());
}
Also used : HRegionInfo(org.apache.hadoop.hbase.HRegionInfo) TableName(org.apache.hadoop.hbase.TableName) HColumnDescriptor(org.apache.hadoop.hbase.HColumnDescriptor) HTableDescriptor(org.apache.hadoop.hbase.HTableDescriptor) Test(org.junit.Test)
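
Beyond counting regions, the HRegionInfo list returned by admin.getTableRegions can be used to check that the splits cover the keyspace end to end. A minimal sketch (the class and method names are illustrative, not part of TestAdmin2), assuming the list is ordered by start key, as it is for a healthy table:

import java.util.List;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.util.Bytes;

public class RegionContiguitySketch {

    // Returns true if each region's end key equals the next region's start key.
    public static boolean isContiguous(List<HRegionInfo> regions) {
        for (int i = 0; i + 1 < regions.size(); i++) {
            byte[] end = regions.get(i).getEndKey();
            byte[] nextStart = regions.get(i + 1).getStartKey();
            if (!Bytes.equals(end, nextStart)) {
                return false;
            }
        }
        return true;
    }
}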

Aggregations

HColumnDescriptor (org.apache.hadoop.hbase.HColumnDescriptor): 671 usages
HTableDescriptor (org.apache.hadoop.hbase.HTableDescriptor): 554 usages
Test (org.junit.Test): 358 usages
TableName (org.apache.hadoop.hbase.TableName): 200 usages
HRegionInfo (org.apache.hadoop.hbase.HRegionInfo): 137 usages
Put (org.apache.hadoop.hbase.client.Put): 132 usages
Table (org.apache.hadoop.hbase.client.Table): 117 usages
Admin (org.apache.hadoop.hbase.client.Admin): 110 usages
IOException (java.io.IOException): 109 usages
Path (org.apache.hadoop.fs.Path): 81 usages
HBaseAdmin (org.apache.hadoop.hbase.client.HBaseAdmin): 71 usages
ArrayList (java.util.ArrayList): 66 usages
Configuration (org.apache.hadoop.conf.Configuration): 65 usages
Connection (org.apache.hadoop.hbase.client.Connection): 51 usages
Scan (org.apache.hadoop.hbase.client.Scan): 50 usages
Result (org.apache.hadoop.hbase.client.Result): 45 usages
FileSystem (org.apache.hadoop.fs.FileSystem): 44 usages
PhoenixConnection (org.apache.phoenix.jdbc.PhoenixConnection): 42 usages
Connection (java.sql.Connection): 41 usages
Properties (java.util.Properties): 38 usages
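
Tying the most frequently co-occurring classes together (HTableDescriptor, TableName, Connection, Admin, Put, and Table), a typical create-and-write flow looks roughly like the sketch below. The table name, family, and cell values are assumptions and not drawn from any of the examples above:

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class CreateAndWriteSketch {

    public static void main(String[] args) throws Exception {
        TableName tableName = TableName.valueOf("example_table");
        byte[] family = Bytes.toBytes("cf");
        try (Connection connection = ConnectionFactory.createConnection(HBaseConfiguration.create());
             Admin admin = connection.getAdmin()) {
            // Create a one-family table, then write a single cell to it.
            HTableDescriptor desc = new HTableDescriptor(tableName);
            desc.addFamily(new HColumnDescriptor(family));
            admin.createTable(desc);
            try (Table table = connection.getTable(tableName)) {
                Put put = new Put(Bytes.toBytes("row-1"));
                put.addColumn(family, Bytes.toBytes("q"), Bytes.toBytes("value"));
                table.put(put);
            }
        }
    }
}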