Usage of org.apache.hadoop.hbase.client.ColumnFamilyDescriptor in the Apache HBase project.
From the class IntegrationTestIngestStripeCompactions, method initTable.
@Override
protected void initTable() throws IOException {
  // Follow LoadTestTool's table setup, but override the table configuration:
  // install the stripe store engine and raise the blocking-storefiles limit.
  TableDescriptor stripeTableDescriptor = TableDescriptorBuilder.newBuilder(getTablename())
    .setValue(StoreEngine.STORE_ENGINE_CLASS_KEY, StripeStoreEngine.class.getName())
    .setValue(HStore.BLOCKING_STOREFILES_KEY, "100")
    .build();
  ColumnFamilyDescriptor defaultFamily =
    ColumnFamilyDescriptorBuilder.of(HFileTestUtil.DEFAULT_COLUMN_FAMILY);
  HBaseTestingUtil.createPreSplitLoadTestTable(util.getConfiguration(), stripeTableDescriptor,
    defaultFamily);
}
Usage of org.apache.hadoop.hbase.client.ColumnFamilyDescriptor in the Apache HBase project.
From the class IntegrationTestIngestWithEncryption, method setUp.
@Before
@Override
public void setUp() throws Exception {
  // Bring up the cluster first. This invokes LoadTestTool -init_only, which
  // creates the test table, appropriately pre-split.
  super.setUp();
  if (!initialized) {
    return;
  }
  // Switch every column family to AES encryption so that HFiles written from
  // this point on carry the encryption features.
  final Admin admin = util.getAdmin();
  TableDescriptor descriptor = admin.getDescriptor(getTablename());
  for (ColumnFamilyDescriptor family : descriptor.getColumnFamilies()) {
    ColumnFamilyDescriptor encryptedFamily =
      ColumnFamilyDescriptorBuilder.newBuilder(family).setEncryptionType("AES").build();
    LOG.info("Updating CF schema for " + getTablename() + "." + family.getNameAsString());
    admin.disableTable(getTablename());
    admin.modifyColumnFamily(getTablename(), encryptedFamily);
    admin.enableTable(getTablename());
    // Block until the table is fully available again before touching the next family.
    util.waitFor(30000, 1000, true, new Predicate<IOException>() {
      @Override
      public boolean evaluate() throws IOException {
        return admin.isTableAvailable(getTablename());
      }
    });
  }
}
Usage of org.apache.hadoop.hbase.client.ColumnFamilyDescriptor in the Apache HBase project.
From the class TestMultiRowResource, method setUpBeforeClass.
@BeforeClass
public static void setUpBeforeClass() throws Exception {
  conf = TEST_UTIL.getConfiguration();
  conf.setBoolean(RESTServer.REST_CSRF_ENABLED_KEY, csrfEnabled);
  if (csrfEnabled) {
    // Match every user agent so CSRF enforcement also applies to the test client.
    conf.set(RESTServer.REST_CSRF_BROWSER_USERAGENTS_REGEX_KEY, ".*");
  }
  extraHdr = new BasicHeader(RESTServer.REST_CSRF_CUSTOM_HEADER_DEFAULT, "");
  TEST_UTIL.startMiniCluster();
  REST_TEST_UTIL.startServletContainer(conf);
  // JAXB plumbing for (un)marshalling REST cell/row models.
  context = JAXBContext.newInstance(CellModel.class, CellSetModel.class, RowModel.class);
  marshaller = context.createMarshaller();
  unmarshaller = context.createUnmarshaller();
  client = new Client(new Cluster().add("localhost", REST_TEST_UTIL.getServletPort()));
  Admin admin = TEST_UTIL.getAdmin();
  if (admin.tableExists(TABLE)) {
    return;
  }
  // Create the test table with its two column families, CFA and CFB.
  TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(TABLE)
    .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(CFA)).build())
    .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(CFB)).build())
    .build();
  admin.createTable(tableDescriptor);
}
Usage of org.apache.hadoop.hbase.client.ColumnFamilyDescriptor in the Apache HBase project.
From the class CloneSnapshotProcedure, method updateTableDescriptorWithSFT.
/**
 * If a custom StoreFileTracker is specified, strip any previous SFT configuration from the
 * TableDescriptor (including per-column-family settings) and set the specified SFT at the
 * table level.
 */
private void updateTableDescriptorWithSFT() {
  // No custom tracker requested: leave the descriptor untouched.
  if (StringUtils.isEmpty(customSFT)) {
    return;
  }
  TableDescriptorBuilder tableBuilder = TableDescriptorBuilder.newBuilder(tableDescriptor);
  tableBuilder.setValue(StoreFileTrackerFactory.TRACKER_IMPL, customSFT);
  // Clear tracker settings on every family so the table-level value takes effect.
  for (ColumnFamilyDescriptor cf : tableDescriptor.getColumnFamilies()) {
    ColumnFamilyDescriptorBuilder familyBuilder = ColumnFamilyDescriptorBuilder.newBuilder(cf);
    familyBuilder.setConfiguration(StoreFileTrackerFactory.TRACKER_IMPL, null);
    familyBuilder.setValue(StoreFileTrackerFactory.TRACKER_IMPL, null);
    tableBuilder.modifyColumnFamily(familyBuilder.build());
  }
  tableDescriptor = tableBuilder.build();
}
Usage of org.apache.hadoop.hbase.client.ColumnFamilyDescriptor in the Apache HBase project.
From the class CatalogJanitor, method checkDaughterInFs.
/**
 * Checks if a daughter region -- either splitA or splitB -- still holds references to parent.
 * @param parent Parent region
 * @param daughter Daughter region
 * @return A pair where the first boolean says whether or not the daughter region directory exists
 *         in the filesystem and then the second boolean says whether the daughter has references
 *         to the parent.
 */
private static Pair<Boolean, Boolean> checkDaughterInFs(MasterServices services, final RegionInfo parent, final RegionInfo daughter) throws IOException {
  if (daughter == null) {
    return new Pair<>(Boolean.FALSE, Boolean.FALSE);
  }
  FileSystem fs = services.getMasterFileSystem().getFileSystem();
  Path rootDir = services.getMasterFileSystem().getRootDir();
  Path tableDir = CommonFSUtils.getTableDir(rootDir, daughter.getTable());
  Path daughterRegionDir = new Path(tableDir, daughter.getEncodedName());
  try {
    // No daughter directory on disk means no references either.
    if (!CommonFSUtils.isExists(fs, daughterRegionDir)) {
      return new Pair<>(Boolean.FALSE, Boolean.FALSE);
    }
  } catch (IOException ioe) {
    // Be conservative on error: report "exists and has references" so nothing gets cleaned.
    LOG.error("Error trying to determine if daughter region exists, " + "assuming exists and has references", ioe);
    return new Pair<>(Boolean.TRUE, Boolean.TRUE);
  }
  TableDescriptor parentDescriptor = services.getTableDescriptors().get(parent.getTable());
  boolean hasReferences = false;
  try {
    HRegionFileSystem regionFs = HRegionFileSystem.openRegionFromFileSystem(services.getConfiguration(), fs, tableDir, daughter, true);
    // A reference file in any one family is enough: the daughter still depends on the parent.
    for (ColumnFamilyDescriptor family : parentDescriptor.getColumnFamilies()) {
      if (regionFs.hasReferences(family.getNameAsString())) {
        hasReferences = true;
        break;
      }
    }
  } catch (IOException e) {
    // Same conservative fallback as above when the reference scan itself fails.
    LOG.error("Error trying to determine referenced files from : " + daughter.getEncodedName() + ", to: " + parent.getEncodedName() + " assuming has references", e);
    return new Pair<>(Boolean.TRUE, Boolean.TRUE);
  }
  return new Pair<>(Boolean.TRUE, hasReferences);
}
Aggregations