Search in sources:

Example 6 with ColumnFamilyDescriptor

use of org.apache.hadoop.hbase.client.ColumnFamilyDescriptor in project hbase by apache.

From the class IntegrationTestIngestStripeCompactions, the method initTable:

@Override
protected void initTable() throws IOException {
    // Mirror LoadTestTool's table setup, but configure the table to use the
    // stripe compaction store engine with a high store-file blocking threshold.
    TableDescriptor stripeTableDescriptor = TableDescriptorBuilder.newBuilder(getTablename())
        .setValue(StoreEngine.STORE_ENGINE_CLASS_KEY, StripeStoreEngine.class.getName())
        .setValue(HStore.BLOCKING_STOREFILES_KEY, "100")
        .build();
    ColumnFamilyDescriptor defaultFamily =
        ColumnFamilyDescriptorBuilder.of(HFileTestUtil.DEFAULT_COLUMN_FAMILY);
    HBaseTestingUtil.createPreSplitLoadTestTable(util.getConfiguration(), stripeTableDescriptor, defaultFamily);
}
Also used : StripeStoreEngine(org.apache.hadoop.hbase.regionserver.StripeStoreEngine) ColumnFamilyDescriptor(org.apache.hadoop.hbase.client.ColumnFamilyDescriptor) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor)

Example 7 with ColumnFamilyDescriptor

use of org.apache.hadoop.hbase.client.ColumnFamilyDescriptor in project hbase by apache.

From the class IntegrationTestIngestWithEncryption, the method setUp:

@Before
@Override
public void setUp() throws Exception {
    // Initialize the cluster. This invokes LoadTestTool -init_only, which
    // will create the test table, appropriately pre-split
    super.setUp();
    // Bail out if the superclass setup decided the test should not run.
    if (!initialized) {
        return;
    }
    // Update the test table schema so HFiles from this point will be written with
    // encryption features enabled.
    final Admin admin = util.getAdmin();
    TableDescriptor tableDescriptor = admin.getDescriptor(getTablename());
    // NOTE(review): the disable/modify/enable cycle below runs once per column
    // family; for a multi-family table it could likely be hoisted around the
    // loop to avoid repeated table bounces — confirm before changing.
    for (ColumnFamilyDescriptor columnDescriptor : tableDescriptor.getColumnFamilies()) {
        // Rebuild the column family descriptor with AES encryption enabled.
        ColumnFamilyDescriptor updatedColumn = ColumnFamilyDescriptorBuilder.newBuilder(columnDescriptor).setEncryptionType("AES").build();
        LOG.info("Updating CF schema for " + getTablename() + "." + columnDescriptor.getNameAsString());
        // Schema change is applied offline: disable, modify, re-enable.
        admin.disableTable(getTablename());
        admin.modifyColumnFamily(getTablename(), updatedColumn);
        admin.enableTable(getTablename());
        // Block (up to 30s, polling every 1s) until the table is available again.
        util.waitFor(30000, 1000, true, new Predicate<IOException>() {

            @Override
            public boolean evaluate() throws IOException {
                return admin.isTableAvailable(getTablename());
            }
        });
    }
}
Also used : IOException(java.io.IOException) Admin(org.apache.hadoop.hbase.client.Admin) ColumnFamilyDescriptor(org.apache.hadoop.hbase.client.ColumnFamilyDescriptor) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor) Before(org.junit.Before)

Example 8 with ColumnFamilyDescriptor

use of org.apache.hadoop.hbase.client.ColumnFamilyDescriptor in project hbase by apache.

From the class TestMultiRowResource, the method setUpBeforeClass:

@BeforeClass
public static void setUpBeforeClass() throws Exception {
    // Configure CSRF protection before the REST servlet container starts.
    conf = TEST_UTIL.getConfiguration();
    conf.setBoolean(RESTServer.REST_CSRF_ENABLED_KEY, csrfEnabled);
    if (csrfEnabled) {
        // Treat every user agent as a browser so the CSRF filter always applies.
        conf.set(RESTServer.REST_CSRF_BROWSER_USERAGENTS_REGEX_KEY, ".*");
    }
    extraHdr = new BasicHeader(RESTServer.REST_CSRF_CUSTOM_HEADER_DEFAULT, "");
    // Bring up the mini cluster and the REST servlet on top of it.
    TEST_UTIL.startMiniCluster();
    REST_TEST_UTIL.startServletContainer(conf);
    // JAXB plumbing for (un)marshalling the REST cell/row models.
    context = JAXBContext.newInstance(CellModel.class, CellSetModel.class, RowModel.class);
    marshaller = context.createMarshaller();
    unmarshaller = context.createUnmarshaller();
    client = new Client(new Cluster().add("localhost", REST_TEST_UTIL.getServletPort()));
    Admin hbaseAdmin = TEST_UTIL.getAdmin();
    // The table may survive across parameterized runs; create it only once.
    if (hbaseAdmin.tableExists(TABLE)) {
        return;
    }
    TableDescriptorBuilder tableBuilder = TableDescriptorBuilder.newBuilder(TABLE);
    for (String family : new String[] { CFA, CFB }) {
        tableBuilder.setColumnFamily(
            ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(family)).build());
    }
    hbaseAdmin.createTable(tableBuilder.build());
}
Also used : CellSetModel(org.apache.hadoop.hbase.rest.model.CellSetModel) Cluster(org.apache.hadoop.hbase.rest.client.Cluster) CellModel(org.apache.hadoop.hbase.rest.model.CellModel) RowModel(org.apache.hadoop.hbase.rest.model.RowModel) TableDescriptorBuilder(org.apache.hadoop.hbase.client.TableDescriptorBuilder) Client(org.apache.hadoop.hbase.rest.client.Client) Admin(org.apache.hadoop.hbase.client.Admin) ColumnFamilyDescriptor(org.apache.hadoop.hbase.client.ColumnFamilyDescriptor) BasicHeader(org.apache.http.message.BasicHeader) BeforeClass(org.junit.BeforeClass)

Example 9 with ColumnFamilyDescriptor

use of org.apache.hadoop.hbase.client.ColumnFamilyDescriptor in project hbase by apache.

From the class CloneSnapshotProcedure, the method updateTableDescriptorWithSFT:

/**
 * If a StoreFileTracker is specified we strip the TableDescriptor from previous SFT config
 * and set the specified SFT on the table level
 */
private void updateTableDescriptorWithSFT() {
    // Nothing to do when no custom store file tracker was requested.
    if (StringUtils.isEmpty(customSFT)) {
        return;
    }
    // Pin the requested tracker implementation at the table level.
    TableDescriptorBuilder tableBuilder = TableDescriptorBuilder.newBuilder(tableDescriptor)
        .setValue(StoreFileTrackerFactory.TRACKER_IMPL, customSFT);
    for (ColumnFamilyDescriptor cf : tableDescriptor.getColumnFamilies()) {
        // Erase any per-family tracker setting (both the configuration and the
        // value form) so the table-level setting is authoritative.
        tableBuilder.modifyColumnFamily(
            ColumnFamilyDescriptorBuilder.newBuilder(cf)
                .setConfiguration(StoreFileTrackerFactory.TRACKER_IMPL, null)
                .setValue(StoreFileTrackerFactory.TRACKER_IMPL, null)
                .build());
    }
    tableDescriptor = tableBuilder.build();
}
Also used : ColumnFamilyDescriptorBuilder(org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder) TableDescriptorBuilder(org.apache.hadoop.hbase.client.TableDescriptorBuilder) ColumnFamilyDescriptor(org.apache.hadoop.hbase.client.ColumnFamilyDescriptor)

Example 10 with ColumnFamilyDescriptor

use of org.apache.hadoop.hbase.client.ColumnFamilyDescriptor in project hbase by apache.

From the class CatalogJanitor, the method checkDaughterInFs:

/**
 * Checks if a daughter region -- either splitA or splitB -- still holds references to parent.
 * @param parent Parent region
 * @param daughter Daughter region
 * @return A pair where the first boolean says whether or not the daughter region directory exists
 *         in the filesystem and then the second boolean says whether the daughter has references
 *         to the parent.
 */
private static Pair<Boolean, Boolean> checkDaughterInFs(MasterServices services, final RegionInfo parent, final RegionInfo daughter) throws IOException {
    // No daughter region at all: nothing exists, nothing is referenced.
    if (daughter == null) {
        return new Pair<>(Boolean.FALSE, Boolean.FALSE);
    }
    final FileSystem fs = services.getMasterFileSystem().getFileSystem();
    final Path rootDir = services.getMasterFileSystem().getRootDir();
    final Path tableDir = CommonFSUtils.getTableDir(rootDir, daughter.getTable());
    final Path daughterDir = new Path(tableDir, daughter.getEncodedName());
    // First, check whether the daughter's region directory is present on disk.
    try {
        if (!CommonFSUtils.isExists(fs, daughterDir)) {
            return new Pair<>(Boolean.FALSE, Boolean.FALSE);
        }
    } catch (IOException ioe) {
        // Conservative fallback: if we cannot tell, assume it exists and holds references.
        LOG.error("Error trying to determine if daughter region exists, " + "assuming exists and has references", ioe);
        return new Pair<>(Boolean.TRUE, Boolean.TRUE);
    }
    TableDescriptor parentDescriptor = services.getTableDescriptors().get(parent.getTable());
    boolean hasReferences = false;
    // Then scan each family for reference files pointing back at the parent.
    try {
        HRegionFileSystem daughterFs = HRegionFileSystem.openRegionFromFileSystem(services.getConfiguration(), fs, tableDir, daughter, true);
        for (ColumnFamilyDescriptor family : parentDescriptor.getColumnFamilies()) {
            if (daughterFs.hasReferences(family.getNameAsString())) {
                hasReferences = true;
                break;
            }
        }
    } catch (IOException e) {
        // Conservative fallback, mirroring the existence check above.
        LOG.error("Error trying to determine referenced files from : " + daughter.getEncodedName() + ", to: " + parent.getEncodedName() + " assuming has references", e);
        return new Pair<>(Boolean.TRUE, Boolean.TRUE);
    }
    return new Pair<>(Boolean.TRUE, hasReferences);
}
Also used : Path(org.apache.hadoop.fs.Path) HRegionFileSystem(org.apache.hadoop.hbase.regionserver.HRegionFileSystem) FileSystem(org.apache.hadoop.fs.FileSystem) HRegionFileSystem(org.apache.hadoop.hbase.regionserver.HRegionFileSystem) IOException(java.io.IOException) ColumnFamilyDescriptor(org.apache.hadoop.hbase.client.ColumnFamilyDescriptor) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor) Pair(org.apache.hadoop.hbase.util.Pair)

Aggregations

ColumnFamilyDescriptor (org.apache.hadoop.hbase.client.ColumnFamilyDescriptor)199 TableDescriptor (org.apache.hadoop.hbase.client.TableDescriptor)95 Test (org.junit.Test)92 TableDescriptorBuilder (org.apache.hadoop.hbase.client.TableDescriptorBuilder)78 IOException (java.io.IOException)44 TableName (org.apache.hadoop.hbase.TableName)44 RegionInfo (org.apache.hadoop.hbase.client.RegionInfo)42 Path (org.apache.hadoop.fs.Path)41 Admin (org.apache.hadoop.hbase.client.Admin)36 Configuration (org.apache.hadoop.conf.Configuration)34 ArrayList (java.util.ArrayList)32 Put (org.apache.hadoop.hbase.client.Put)32 FileSystem (org.apache.hadoop.fs.FileSystem)28 HRegion (org.apache.hadoop.hbase.regionserver.HRegion)24 HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration)22 Get (org.apache.hadoop.hbase.client.Get)20 Result (org.apache.hadoop.hbase.client.Result)19 ColumnFamilyDescriptorBuilder (org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder)17 Scan (org.apache.hadoop.hbase.client.Scan)17 Table (org.apache.hadoop.hbase.client.Table)17