Search in sources:

Example 16 with ColumnFamilyDescriptor

use of org.apache.hadoop.hbase.client.ColumnFamilyDescriptor in project hbase by apache.

The class MobUtils, method getMobColumnFamilies:

/**
 * Returns the MOB-enabled column families of a table, if any exist.
 * @param htd table descriptor to inspect
 * @return list of column families with MOB enabled; empty list when none qualify
 */
public static List<ColumnFamilyDescriptor> getMobColumnFamilies(TableDescriptor htd) {
    // Diamond operator: the element type is inferred from the declaration.
    List<ColumnFamilyDescriptor> fams = new ArrayList<>();
    for (ColumnFamilyDescriptor hcd : htd.getColumnFamilies()) {
        if (hcd.isMobEnabled()) {
            fams.add(hcd);
        }
    }
    return fams;
}
Also used : ArrayList(java.util.ArrayList) ColumnFamilyDescriptor(org.apache.hadoop.hbase.client.ColumnFamilyDescriptor)

Example 17 with ColumnFamilyDescriptor

use of org.apache.hadoop.hbase.client.ColumnFamilyDescriptor in project hbase by apache.

The class RemoveColumnAction, method perform:

@Override
public void perform() throws Exception {
    // Snapshot the table's current schema.
    TableDescriptor tableDescriptor = admin.getDescriptor(tableName);
    ColumnFamilyDescriptor[] families = tableDescriptor.getColumnFamilies();
    // Leave the table intact when removing a family would drop below the
    // minimum: all protected families, or at least one family overall.
    int minFamilies = (protectedColumns == null) ? 1 : protectedColumns.size();
    if (families.length <= minFamilies) {
        return;
    }
    // Pick a random family, re-rolling until we land on an unprotected one.
    int victim = random.nextInt(families.length);
    while (protectedColumns != null
        && protectedColumns.contains(families[victim].getNameAsString())) {
        victim = random.nextInt(families.length);
    }
    byte[] victimName = families[victim].getName();
    getLogger().debug("Performing action: Removing " + Bytes.toString(victimName) + " from " + tableName.getNameAsString());
    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableDescriptor);
    builder.removeColumnFamily(victimName);
    // Don't try the modify if we're stopping
    if (context.isStopping()) {
        return;
    }
    admin.modifyTable(builder.build());
}
Also used : TableDescriptorBuilder(org.apache.hadoop.hbase.client.TableDescriptorBuilder) ColumnFamilyDescriptor(org.apache.hadoop.hbase.client.ColumnFamilyDescriptor) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor)

Example 18 with ColumnFamilyDescriptor

use of org.apache.hadoop.hbase.client.ColumnFamilyDescriptor in project hbase by apache.

The class TestTableScan, method setUpBeforeClass:

@BeforeClass
public static void setUpBeforeClass() throws Exception {
    // Register the custom scan filter before the cluster starts.
    conf = TEST_UTIL.getConfiguration();
    conf.set(Constants.CUSTOM_FILTERS, "CustomFilter:" + CustomFilter.class.getName());
    TEST_UTIL.startMiniCluster();
    REST_TEST_UTIL.startServletContainer(conf);
    // REST client pointed at the freshly started servlet container.
    client = new Client(new Cluster().add("localhost", REST_TEST_UTIL.getServletPort()));
    Admin admin = TEST_UTIL.getAdmin();
    if (!admin.tableExists(TABLE)) {
        // Create the test table with the two column families, then seed it.
        admin.createTable(TableDescriptorBuilder.newBuilder(TABLE)
            .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(CFA)).build())
            .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(CFB)).build())
            .build());
        expectedRows1 = TestScannerResource.insertData(conf, TABLE, COLUMN_1, 1.0);
        expectedRows2 = TestScannerResource.insertData(conf, TABLE, COLUMN_2, 0.5);
        expectedRows3 = TestScannerResource.insertData(conf, TABLE, COLUMN_EMPTY, 1.0);
    }
}
Also used : Cluster(org.apache.hadoop.hbase.rest.client.Cluster) TableDescriptorBuilder(org.apache.hadoop.hbase.client.TableDescriptorBuilder) Client(org.apache.hadoop.hbase.rest.client.Client) Admin(org.apache.hadoop.hbase.client.Admin) ColumnFamilyDescriptor(org.apache.hadoop.hbase.client.ColumnFamilyDescriptor) BeforeClass(org.junit.BeforeClass)

Example 19 with ColumnFamilyDescriptor

use of org.apache.hadoop.hbase.client.ColumnFamilyDescriptor in project hbase by apache.

The class MasterRegion, method tryMigrate:

/**
 * Migrates the region's store files from the old store file tracker implementation to the
 * new one when the tracker class changes between the old and new table descriptors, and
 * persists the new table descriptor afterwards. When the tracker class is unchanged, only
 * the table descriptor is (re)persisted if it differs.
 */
private static void tryMigrate(Configuration conf, FileSystem fs, Path tableDir, RegionInfo regionInfo, TableDescriptor oldTd, TableDescriptor newTd) throws IOException {
    Class<? extends StoreFileTracker> oldSft = StoreFileTrackerFactory.getTrackerClass(oldTd.getValue(StoreFileTrackerFactory.TRACKER_IMPL));
    Class<? extends StoreFileTracker> newSft = StoreFileTrackerFactory.getTrackerClass(newTd.getValue(StoreFileTrackerFactory.TRACKER_IMPL));
    if (oldSft.equals(newSft)) {
        LOG.debug("old store file tracker {} is the same with new store file tracker, skip migration", StoreFileTrackerFactory.getStoreFileTrackerName(oldSft));
        if (!oldTd.equals(newTd)) {
            // we may change other things such as adding a new family, so here we still need to persist
            // the new table descriptor
            LOG.info("Update table descriptor from {} to {}", oldTd, newTd);
            FSTableDescriptors.createTableDescriptorForTableDirectory(fs, tableDir, newTd, true);
        }
        return;
    }
    LOG.info("Migrate store file tracker from {} to {}", oldSft.getSimpleName(), newSft.getSimpleName());
    HRegionFileSystem hfs = HRegionFileSystem.openRegionFromFileSystem(conf, fs, tableDir, regionInfo, false);
    for (ColumnFamilyDescriptor oldCfd : oldTd.getColumnFamilies()) {
        StoreFileTracker oldTracker = StoreFileTrackerFactory.create(conf, oldTd, oldCfd, hfs);
        // BUGFIX: the new tracker must be built from the NEW table descriptor; building it
        // from oldTd produced a tracker identical to oldTracker, so nothing was migrated.
        StoreFileTracker newTracker = StoreFileTrackerFactory.create(conf, newTd, oldCfd, hfs);
        List<StoreFileInfo> files = oldTracker.load();
        LOG.debug("Store file list for {}: {}", oldCfd.getNameAsString(), files);
        // Reuse the already-loaded list instead of hitting the filesystem a second time.
        newTracker.set(files);
    }
    // persist the new table descriptor after migration
    LOG.info("Update table descriptor from {} to {}", oldTd, newTd);
    FSTableDescriptors.createTableDescriptorForTableDirectory(fs, tableDir, newTd, true);
}
Also used : HRegionFileSystem(org.apache.hadoop.hbase.regionserver.HRegionFileSystem) ColumnFamilyDescriptor(org.apache.hadoop.hbase.client.ColumnFamilyDescriptor) StoreFileTracker(org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTracker) StoreFileInfo(org.apache.hadoop.hbase.regionserver.StoreFileInfo)

Example 20 with ColumnFamilyDescriptor

use of org.apache.hadoop.hbase.client.ColumnFamilyDescriptor in project hbase by apache.

The class TestHBaseMetaEdit, method testEditMeta:

/**
 * Set versions, set HBASE-16213 indexed block encoding, and add a column family.
 * Delete the column family. Then try to delete a core hbase:meta family (should fail).
 * Verify they are all in place by looking at TableDescriptor AND by checking
 * what the RegionServer sees after opening Region.
 */
@Test
public void testEditMeta() throws IOException {
    Admin admin = UTIL.getAdmin();
    admin.tableExists(TableName.META_TABLE_NAME);
    TableDescriptor originalDescriptor = getMetaDescriptor();
    ColumnFamilyDescriptor cfd = originalDescriptor.getColumnFamily(HConstants.CATALOG_FAMILY);
    int oldVersions = cfd.getMaxVersions();
    // Add '1' to current versions count. Set encoding too.
    cfd = ColumnFamilyDescriptorBuilder.newBuilder(cfd).setMaxVersions(oldVersions + 1).setConfiguration(ColumnFamilyDescriptorBuilder.DATA_BLOCK_ENCODING, DataBlockEncoding.ROW_INDEX_V1.toString()).build();
    admin.modifyColumnFamily(TableName.META_TABLE_NAME, cfd);
    byte[] extraColumnFamilyName = Bytes.toBytes("xtra");
    ColumnFamilyDescriptor newCfd = ColumnFamilyDescriptorBuilder.newBuilder(extraColumnFamilyName).build();
    admin.addColumnFamily(TableName.META_TABLE_NAME, newCfd);
    TableDescriptor descriptor = getMetaDescriptor();
    // Assert new max versions is == old versions plus 1.
    // (The original test fetched the descriptor and ran this assertion twice
    // back-to-back; the verbatim duplicate has been removed.)
    assertEquals(oldVersions + 1, descriptor.getColumnFamily(HConstants.CATALOG_FAMILY).getMaxVersions());
    assertTrue(descriptor.getColumnFamily(newCfd.getName()) != null);
    String encoding = descriptor.getColumnFamily(HConstants.CATALOG_FAMILY).getConfiguration().get(ColumnFamilyDescriptorBuilder.DATA_BLOCK_ENCODING);
    assertEquals(encoding, DataBlockEncoding.ROW_INDEX_V1.toString());
    // Cross-check what the RegionServer actually sees after opening the region.
    Region r = UTIL.getHBaseCluster().getRegionServer(0).getRegion(RegionInfoBuilder.FIRST_META_REGIONINFO.getEncodedName());
    assertEquals(oldVersions + 1, r.getStore(HConstants.CATALOG_FAMILY).getColumnFamilyDescriptor().getMaxVersions());
    encoding = r.getStore(HConstants.CATALOG_FAMILY).getColumnFamilyDescriptor().getConfigurationValue(ColumnFamilyDescriptorBuilder.DATA_BLOCK_ENCODING);
    assertEquals(encoding, DataBlockEncoding.ROW_INDEX_V1.toString());
    assertTrue(r.getStore(extraColumnFamilyName) != null);
    // Assert we can't drop critical hbase:meta column family but we can drop any other.
    admin.deleteColumnFamily(TableName.META_TABLE_NAME, newCfd.getName());
    descriptor = getMetaDescriptor();
    assertTrue(descriptor.getColumnFamily(newCfd.getName()) == null);
    try {
        admin.deleteColumnFamily(TableName.META_TABLE_NAME, HConstants.CATALOG_FAMILY);
        fail("Should not reach here");
    } catch (HBaseIOException hioe) {
        assertTrue(hioe.getMessage().contains("Delete of hbase:meta"));
    }
}
Also used : Region(org.apache.hadoop.hbase.regionserver.Region) Admin(org.apache.hadoop.hbase.client.Admin) ColumnFamilyDescriptor(org.apache.hadoop.hbase.client.ColumnFamilyDescriptor) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor) Test(org.junit.Test)

Aggregations

ColumnFamilyDescriptor (org.apache.hadoop.hbase.client.ColumnFamilyDescriptor)199 TableDescriptor (org.apache.hadoop.hbase.client.TableDescriptor)95 Test (org.junit.Test)92 TableDescriptorBuilder (org.apache.hadoop.hbase.client.TableDescriptorBuilder)78 IOException (java.io.IOException)44 TableName (org.apache.hadoop.hbase.TableName)44 RegionInfo (org.apache.hadoop.hbase.client.RegionInfo)42 Path (org.apache.hadoop.fs.Path)41 Admin (org.apache.hadoop.hbase.client.Admin)36 Configuration (org.apache.hadoop.conf.Configuration)34 ArrayList (java.util.ArrayList)32 Put (org.apache.hadoop.hbase.client.Put)32 FileSystem (org.apache.hadoop.fs.FileSystem)28 HRegion (org.apache.hadoop.hbase.regionserver.HRegion)24 HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration)22 Get (org.apache.hadoop.hbase.client.Get)20 Result (org.apache.hadoop.hbase.client.Result)19 ColumnFamilyDescriptorBuilder (org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder)17 Scan (org.apache.hadoop.hbase.client.Scan)17 Table (org.apache.hadoop.hbase.client.Table)17