Example 31 with TableDescriptor

Use of org.apache.hadoop.hbase.client.TableDescriptor in project hbase by apache.

Class HRegionFileSystem, method insertRegionFilesIntoStoreTracker.

private void insertRegionFilesIntoStoreTracker(List<Path> allFiles, MasterProcedureEnv env, HRegionFileSystem regionFs) throws IOException {
    TableDescriptor tblDesc = env.getMasterServices().getTableDescriptors().get(regionInfo.getTable());
    // we need to map trackers per store
    Map<String, StoreFileTracker> trackerMap = new HashMap<>();
    // we need to map store files per store
    Map<String, List<StoreFileInfo>> fileInfoMap = new HashMap<>();
    for (Path file : allFiles) {
        String familyName = file.getParent().getName();
        trackerMap.computeIfAbsent(familyName, t -> StoreFileTrackerFactory.create(conf, tblDesc, tblDesc.getColumnFamily(Bytes.toBytes(familyName)), regionFs));
        fileInfoMap.computeIfAbsent(familyName, l -> new ArrayList<>());
        List<StoreFileInfo> infos = fileInfoMap.get(familyName);
        infos.add(new StoreFileInfo(conf, fs, file, true));
    }
    for (Map.Entry<String, StoreFileTracker> entry : trackerMap.entrySet()) {
        entry.getValue().add(fileInfoMap.get(entry.getKey()));
    }
}
Also used: Path (org.apache.hadoop.fs.Path), HashMap (java.util.HashMap), TableDescriptor (org.apache.hadoop.hbase.client.TableDescriptor), ArrayList (java.util.ArrayList), List (java.util.List), StoreFileTracker (org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTracker), Map (java.util.Map)
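
For reference, a minimal, self-contained sketch of the descriptor lookup the tracker creation above depends on: fetching a ColumnFamilyDescriptor from a TableDescriptor by family name. The table and family names here are hypothetical.

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;

public class TableDescriptorLookupSketch {
    public static void main(String[] args) {
        // Build a descriptor for a hypothetical table with a single family "cf".
        TableDescriptor tblDesc = TableDescriptorBuilder.newBuilder(TableName.valueOf("demo_table"))
            .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf"))
            .build();
        // Look up the family descriptor by name, as the StoreFileTracker creation above does.
        ColumnFamilyDescriptor family = tblDesc.getColumnFamily(Bytes.toBytes("cf"));
        System.out.println(family.getNameAsString());
    }
}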

Example 32 with TableDescriptor

Use of org.apache.hadoop.hbase.client.TableDescriptor in project hbase by apache.

Class IncreasingToUpperBoundRegionSplitPolicy, method configureForRegion.

@Override
protected void configureForRegion(HRegion region) {
    super.configureForRegion(region);
    Configuration conf = getConf();
    // An explicitly configured initial size takes precedence.
    initialSize = conf.getLong("hbase.increasing.policy.initial.size", -1);
    if (initialSize > 0) {
        return;
    }
    // Otherwise default to twice the table's memstore flush size...
    TableDescriptor desc = region.getTableDescriptor();
    if (desc != null) {
        initialSize = 2 * desc.getMemStoreFlushSize();
    }
    // ...falling back to twice the cluster-wide flush size.
    if (initialSize <= 0) {
        initialSize = 2 * conf.getLong(HConstants.HREGION_MEMSTORE_FLUSH_SIZE, TableDescriptorBuilder.DEFAULT_MEMSTORE_FLUSH_SIZE);
    }
}
Also used: Configuration (org.apache.hadoop.conf.Configuration), TableDescriptor (org.apache.hadoop.hbase.client.TableDescriptor)
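
The policy above sizes its first split at twice the memstore flush size unless hbase.increasing.policy.initial.size is set. A small sketch of both knobs; the values and table name are illustrative, not recommendations.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;

public class SplitPolicySizeSketch {
    public static void main(String[] args) {
        // Option 1: pin the policy's initial size directly; it takes precedence above.
        Configuration conf = HBaseConfiguration.create();
        conf.setLong("hbase.increasing.policy.initial.size", 256L * 1024 * 1024);

        // Option 2: set a per-table memstore flush size; the policy then uses twice this value.
        TableDescriptor desc = TableDescriptorBuilder.newBuilder(TableName.valueOf("demo_table"))
            .setMemStoreFlushSize(128L * 1024 * 1024)
            .build();
        System.out.println("derived initial size: " + (2 * desc.getMemStoreFlushSize()));
    }
}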

Example 33 with TableDescriptor

Use of org.apache.hadoop.hbase.client.TableDescriptor in project hbase by apache.

Class IntegrationTestIngestStripeCompactions, method initTable.

@Override
protected void initTable() throws IOException {
    // Do the same as the LoadTestTool does, but with different table configuration.
    TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(getTablename()).setValue(StoreEngine.STORE_ENGINE_CLASS_KEY, StripeStoreEngine.class.getName()).setValue(HStore.BLOCKING_STOREFILES_KEY, "100").build();
    ColumnFamilyDescriptor familyDescriptor = ColumnFamilyDescriptorBuilder.of(HFileTestUtil.DEFAULT_COLUMN_FAMILY);
    HBaseTestingUtil.createPreSplitLoadTestTable(util.getConfiguration(), tableDescriptor, familyDescriptor);
}
Also used: StripeStoreEngine (org.apache.hadoop.hbase.regionserver.StripeStoreEngine), ColumnFamilyDescriptor (org.apache.hadoop.hbase.client.ColumnFamilyDescriptor), TableDescriptor (org.apache.hadoop.hbase.client.TableDescriptor)
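
The same setValue-based builder chain can be exercised without a cluster. A sketch (hypothetical table and family names) that builds the descriptor and reads the store-engine setting back:

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.regionserver.HStore;
import org.apache.hadoop.hbase.regionserver.StoreEngine;
import org.apache.hadoop.hbase.regionserver.StripeStoreEngine;

public class StripeEngineDescriptorSketch {
    public static void main(String[] args) {
        // Same keys as the test above, with the column family attached directly.
        TableDescriptor td = TableDescriptorBuilder.newBuilder(TableName.valueOf("demo_table"))
            .setValue(StoreEngine.STORE_ENGINE_CLASS_KEY, StripeStoreEngine.class.getName())
            .setValue(HStore.BLOCKING_STOREFILES_KEY, "100")
            .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf"))
            .build();
        // Settings stored via setValue can be read back from the descriptor.
        System.out.println(td.getValue(StoreEngine.STORE_ENGINE_CLASS_KEY));
    }
}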

Example 34 with TableDescriptor

Use of org.apache.hadoop.hbase.client.TableDescriptor in project hbase by apache.

Class IntegrationTestIngestWithEncryption, method setUp.

@Before
@Override
public void setUp() throws Exception {
    // Initialize the cluster. This invokes LoadTestTool -init_only, which
    // will create the test table, appropriately pre-split
    super.setUp();
    if (!initialized) {
        return;
    }
    // Update the test table schema so HFiles from this point will be written with
    // encryption features enabled.
    final Admin admin = util.getAdmin();
    TableDescriptor tableDescriptor = admin.getDescriptor(getTablename());
    for (ColumnFamilyDescriptor columnDescriptor : tableDescriptor.getColumnFamilies()) {
        ColumnFamilyDescriptor updatedColumn = ColumnFamilyDescriptorBuilder.newBuilder(columnDescriptor).setEncryptionType("AES").build();
        LOG.info("Updating CF schema for " + getTablename() + "." + columnDescriptor.getNameAsString());
        admin.disableTable(getTablename());
        admin.modifyColumnFamily(getTablename(), updatedColumn);
        admin.enableTable(getTablename());
        util.waitFor(30000, 1000, true, new Predicate<IOException>() {

            @Override
            public boolean evaluate() throws IOException {
                return admin.isTableAvailable(getTablename());
            }
        });
    }
}
Also used: IOException (java.io.IOException), Admin (org.apache.hadoop.hbase.client.Admin), ColumnFamilyDescriptor (org.apache.hadoop.hbase.client.ColumnFamilyDescriptor), TableDescriptor (org.apache.hadoop.hbase.client.TableDescriptor), Before (org.junit.Before)
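
The schema change itself reduces to deriving a new ColumnFamilyDescriptor from an existing one. A cluster-free sketch of that derivation (the family name is hypothetical):

import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;

public class EncryptionSchemaSketch {
    public static void main(String[] args) {
        // Stand-in for an existing family descriptor, as returned by
        // tableDescriptor.getColumnFamilies() in the setUp method above.
        ColumnFamilyDescriptor original = ColumnFamilyDescriptorBuilder.of("cf");
        // Derive a copy with AES encryption enabled; descriptors are immutable,
        // so newBuilder(existing) is the standard way to modify one.
        ColumnFamilyDescriptor updated = ColumnFamilyDescriptorBuilder.newBuilder(original)
            .setEncryptionType("AES")
            .build();
        System.out.println(updated.getEncryptionType());
    }
}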

Example 35 with TableDescriptor

Use of org.apache.hadoop.hbase.client.TableDescriptor in project hbase by apache.

Class TestScannersWithFilters, method setUpBeforeClass.

@BeforeClass
public static void setUpBeforeClass() throws Exception {
    TEST_UTIL.startMiniCluster(3);
    REST_TEST_UTIL.startServletContainer(TEST_UTIL.getConfiguration());
    context = JAXBContext.newInstance(CellModel.class, CellSetModel.class, RowModel.class, ScannerModel.class);
    marshaller = context.createMarshaller();
    unmarshaller = context.createUnmarshaller();
    client = new Client(new Cluster().add("localhost", REST_TEST_UTIL.getServletPort()));
    Admin admin = TEST_UTIL.getAdmin();
    if (!admin.tableExists(TABLE)) {
        TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(TABLE).setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILIES[0])).setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILIES[1])).build();
        admin.createTable(tableDescriptor);
        Table table = TEST_UTIL.getConnection().getTable(TABLE);
        // Insert first half
        for (byte[] ROW : ROWS_ONE) {
            Put p = new Put(ROW);
            p.setDurability(Durability.SKIP_WAL);
            for (byte[] QUALIFIER : QUALIFIERS_ONE) {
                p.addColumn(FAMILIES[0], QUALIFIER, VALUES[0]);
            }
            table.put(p);
        }
        for (byte[] ROW : ROWS_TWO) {
            Put p = new Put(ROW);
            p.setDurability(Durability.SKIP_WAL);
            for (byte[] QUALIFIER : QUALIFIERS_TWO) {
                p.addColumn(FAMILIES[1], QUALIFIER, VALUES[1]);
            }
            table.put(p);
        }
        // Insert second half (reverse families)
        for (byte[] ROW : ROWS_ONE) {
            Put p = new Put(ROW);
            p.setDurability(Durability.SKIP_WAL);
            for (byte[] QUALIFIER : QUALIFIERS_ONE) {
                p.addColumn(FAMILIES[1], QUALIFIER, VALUES[0]);
            }
            table.put(p);
        }
        for (byte[] ROW : ROWS_TWO) {
            Put p = new Put(ROW);
            p.setDurability(Durability.SKIP_WAL);
            for (byte[] QUALIFIER : QUALIFIERS_TWO) {
                p.addColumn(FAMILIES[0], QUALIFIER, VALUES[1]);
            }
            table.put(p);
        }
        // Delete the second qualifier from all rows and families
        for (byte[] ROW : ROWS_ONE) {
            Delete d = new Delete(ROW);
            d.addColumns(FAMILIES[0], QUALIFIERS_ONE[1]);
            d.addColumns(FAMILIES[1], QUALIFIERS_ONE[1]);
            table.delete(d);
        }
        for (byte[] ROW : ROWS_TWO) {
            Delete d = new Delete(ROW);
            d.addColumns(FAMILIES[0], QUALIFIERS_TWO[1]);
            d.addColumns(FAMILIES[1], QUALIFIERS_TWO[1]);
            table.delete(d);
        }
        colsPerRow -= 2;
        // Delete the second rows from both groups, one column at a time
        for (byte[] QUALIFIER : QUALIFIERS_ONE) {
            Delete d = new Delete(ROWS_ONE[1]);
            d.addColumns(FAMILIES[0], QUALIFIER);
            d.addColumns(FAMILIES[1], QUALIFIER);
            table.delete(d);
        }
        for (byte[] QUALIFIER : QUALIFIERS_TWO) {
            Delete d = new Delete(ROWS_TWO[1]);
            d.addColumns(FAMILIES[0], QUALIFIER);
            d.addColumns(FAMILIES[1], QUALIFIER);
            table.delete(d);
        }
        numRows -= 2;
        table.close();
    }
}
Also used: Delete (org.apache.hadoop.hbase.client.Delete), Table (org.apache.hadoop.hbase.client.Table), CellSetModel (org.apache.hadoop.hbase.rest.model.CellSetModel), Cluster (org.apache.hadoop.hbase.rest.client.Cluster), CellModel (org.apache.hadoop.hbase.rest.model.CellModel), RowModel (org.apache.hadoop.hbase.rest.model.RowModel), Client (org.apache.hadoop.hbase.rest.client.Client), Admin (org.apache.hadoop.hbase.client.Admin), ScannerModel (org.apache.hadoop.hbase.rest.model.ScannerModel), TableDescriptor (org.apache.hadoop.hbase.client.TableDescriptor), Put (org.apache.hadoop.hbase.client.Put), BeforeClass (org.junit.BeforeClass)
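
The table creation and data-loading pattern above condenses to a few reusable pieces. A standalone sketch (all names hypothetical) of the two-family descriptor and a WAL-skipping Put, as used in the loading loops:

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;

public class TwoFamilyTableSketch {
    public static void main(String[] args) {
        byte[] fam1 = Bytes.toBytes("a");
        byte[] fam2 = Bytes.toBytes("b");
        // Two-family descriptor, mirroring the test table's layout.
        TableDescriptor td = TableDescriptorBuilder.newBuilder(TableName.valueOf("demo_table"))
            .setColumnFamily(ColumnFamilyDescriptorBuilder.of(fam1))
            .setColumnFamily(ColumnFamilyDescriptorBuilder.of(fam2))
            .build();
        // A Put that skips the WAL, as in the data-loading loops above.
        Put p = new Put(Bytes.toBytes("row1"));
        p.setDurability(Durability.SKIP_WAL);
        p.addColumn(fam1, Bytes.toBytes("q1"), Bytes.toBytes("v1"));
        System.out.println(td.getColumnFamilyCount() + " families; put: " + p);
    }
}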

Aggregations

TableDescriptor (org.apache.hadoop.hbase.client.TableDescriptor): 639
Test (org.junit.Test): 356
TableName (org.apache.hadoop.hbase.TableName): 237
RegionInfo (org.apache.hadoop.hbase.client.RegionInfo): 180
IOException (java.io.IOException): 151
Put (org.apache.hadoop.hbase.client.Put): 142
Admin (org.apache.hadoop.hbase.client.Admin): 136
Path (org.apache.hadoop.fs.Path): 124
Table (org.apache.hadoop.hbase.client.Table): 121
ColumnFamilyDescriptor (org.apache.hadoop.hbase.client.ColumnFamilyDescriptor): 96
Configuration (org.apache.hadoop.conf.Configuration): 91
TableDescriptorBuilder (org.apache.hadoop.hbase.client.TableDescriptorBuilder): 77
ArrayList (java.util.ArrayList): 75
FileSystem (org.apache.hadoop.fs.FileSystem): 66
Result (org.apache.hadoop.hbase.client.Result): 66
HRegion (org.apache.hadoop.hbase.regionserver.HRegion): 64
Connection (org.apache.hadoop.hbase.client.Connection): 59
Scan (org.apache.hadoop.hbase.client.Scan): 50
Get (org.apache.hadoop.hbase.client.Get): 49
List (java.util.List): 39