Use of org.apache.hadoop.hbase.client.TableDescriptorBuilder in project hbase by apache: class TestNamespace, method createTableInDefaultNamespace.
@Test
public void createTableInDefaultNamespace() throws Exception {
  TableDescriptorBuilder tableDescriptorBuilder =
    TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName()));
  ColumnFamilyDescriptor columnFamilyDescriptor =
    ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("cf1")).build();
  tableDescriptorBuilder.setColumnFamily(columnFamilyDescriptor);
  TableDescriptor tableDescriptor = tableDescriptorBuilder.build();
  admin.createTable(tableDescriptor);
  assertTrue(admin.listTableDescriptors().size() == 1);
  admin.disableTable(tableDescriptor.getTableName());
  admin.deleteTable(tableDescriptor.getTableName());
}
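Because setColumnFamily returns the builder, the same table can also be created with a single fluent chain. The sketch below is a minimal, hypothetical variant (the helper name createSimpleTable and the family name "cf1" are illustrative, and an Admin instance is assumed to be available); it is not code from the HBase project itself.

import java.io.IOException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;

// Hypothetical helper: create a single-family table with one fluent builder chain.
private static void createSimpleTable(Admin admin, String name) throws IOException {
  TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(TableName.valueOf(name))
    .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf1"))
    .build();
  admin.createTable(tableDescriptor);
}

Additional per-family or per-table options can be appended to the chain before build() is called.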
Use of org.apache.hadoop.hbase.client.TableDescriptorBuilder in project hbase by apache: class TestEncryptionKeyRotation, method testCFKeyRotation.
@Test
public void testCFKeyRotation() throws Exception {
  // Create the table schema
  TableDescriptorBuilder tableDescriptorBuilder =
    TableDescriptorBuilder.newBuilder(TableName.valueOf("default", name.getMethodName()));
  ColumnFamilyDescriptorBuilder columnFamilyDescriptorBuilder =
    ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("cf"));
  String algorithm = conf.get(HConstants.CRYPTO_KEY_ALGORITHM_CONF_KEY, HConstants.CIPHER_AES);
  columnFamilyDescriptorBuilder.setEncryptionType(algorithm);
  columnFamilyDescriptorBuilder.setEncryptionKey(EncryptionUtil.wrapKey(conf, "hbase", initialCFKey));
  tableDescriptorBuilder.setColumnFamily(columnFamilyDescriptorBuilder.build());
  TableDescriptor tableDescriptor = tableDescriptorBuilder.build();
  // Create the table and some on disk files
  createTableAndFlush(tableDescriptor);
  // Verify we have store file(s) with the initial key
  final List<Path> initialPaths = findStorefilePaths(tableDescriptor.getTableName());
  assertTrue(initialPaths.size() > 0);
  for (Path path : initialPaths) {
    assertTrue("Store file " + path + " has incorrect key",
      Bytes.equals(initialCFKey.getEncoded(), extractHFileKey(path)));
  }
  // Update the schema with a new encryption key
  columnFamilyDescriptorBuilder.setEncryptionKey(EncryptionUtil.wrapKey(conf,
    conf.get(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, User.getCurrent().getShortName()),
    secondCFKey));
  TEST_UTIL.getAdmin().modifyColumnFamily(tableDescriptor.getTableName(),
    columnFamilyDescriptorBuilder.build());
  // Need a predicate for online schema change
  Thread.sleep(5000);
  // And major compact
  TEST_UTIL.getAdmin().majorCompact(tableDescriptor.getTableName());
  // waiting for the major compaction to complete
  TEST_UTIL.waitFor(30000, new Waiter.Predicate<IOException>() {
    @Override
    public boolean evaluate() throws IOException {
      return TEST_UTIL.getAdmin().getCompactionState(tableDescriptor.getTableName())
          == CompactionState.NONE;
    }
  });
  List<Path> pathsAfterCompaction = findStorefilePaths(tableDescriptor.getTableName());
  assertTrue(pathsAfterCompaction.size() > 0);
  for (Path path : pathsAfterCompaction) {
    assertTrue("Store file " + path + " has incorrect key",
      Bytes.equals(secondCFKey.getEncoded(), extractHFileKey(path)));
  }
  List<Path> compactedPaths = findCompactedStorefilePaths(tableDescriptor.getTableName());
  assertTrue(compactedPaths.size() > 0);
  for (Path path : compactedPaths) {
    assertTrue("Store file " + path + " retains initial key",
      Bytes.equals(initialCFKey.getEncoded(), extractHFileKey(path)));
  }
}
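On Java 8 and later, the anonymous Waiter.Predicate used for the compaction wait can be written as a lambda. The helper below is a sketch, not project code; it assumes an HBaseTestingUtility instance is available, and the method name waitForMajorCompaction is illustrative.

import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.CompactionState;

// Hypothetical helper: block (up to 30 seconds) until no compaction is running on the table,
// using a lambda in place of the anonymous Waiter.Predicate shown above.
private static void waitForMajorCompaction(HBaseTestingUtility util, TableName tableName)
    throws Exception {
  util.waitFor(30000, () ->
    util.getAdmin().getCompactionState(tableName) == CompactionState.NONE);
}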
Use of org.apache.hadoop.hbase.client.TableDescriptorBuilder in project hbase by apache: class TestHRegion, method testStatusSettingToAbortIfAnyExceptionDuringRegionInitilization.
/**
 * Test case to check whether the state of the region initialization task is set to ABORTED
 * when an exception occurs during initialization.
 * @throws Exception
 */
@Test
public void testStatusSettingToAbortIfAnyExceptionDuringRegionInitilization() throws Exception {
  RegionInfo info;
  try {
    FileSystem fs = Mockito.mock(FileSystem.class);
    Mockito.when(fs.exists((Path) Mockito.anyObject())).thenThrow(new IOException());
    TableDescriptorBuilder tableDescriptorBuilder = TableDescriptorBuilder.newBuilder(tableName);
    ColumnFamilyDescriptor columnFamilyDescriptor =
      ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("cf")).build();
    tableDescriptorBuilder.setColumnFamily(columnFamilyDescriptor);
    info = RegionInfoBuilder.newBuilder(tableName).build();
    Path path = new Path(dir + "testStatusSettingToAbortIfAnyExceptionDuringRegionInitilization");
    region = HRegion.newHRegion(path, null, fs, CONF, info, tableDescriptorBuilder.build(), null);
    // Region initialization throws IOException and sets the task state to ABORTED.
    region.initialize();
    fail("Region initialization should fail due to IOException");
  } catch (IOException io) {
    List<MonitoredTask> tasks = TaskMonitor.get().getTasks();
    for (MonitoredTask monitoredTask : tasks) {
      if (!(monitoredTask instanceof MonitoredRPCHandler)
          && monitoredTask.getDescription().contains(region.toString())) {
        assertTrue("Region state should be ABORTED.",
          monitoredTask.getState().equals(MonitoredTask.State.ABORTED));
        break;
      }
    }
  }
}
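Mockito.anyObject() as used above is deprecated in current Mockito versions; an equivalent, hypothetical way to build the failing FileSystem mock with Mockito.any(Path.class) is sketched below (the helper name failingFileSystem is illustrative, not part of the HBase test).

import java.io.IOException;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.mockito.Mockito;

// Hypothetical helper: a FileSystem mock whose exists() always throws, which is what
// forces HRegion.initialize() in the test above to abort with an IOException.
private static FileSystem failingFileSystem() throws IOException {
  FileSystem fs = Mockito.mock(FileSystem.class);
  Mockito.when(fs.exists(Mockito.any(Path.class))).thenThrow(new IOException("injected failure"));
  return fs;
}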
Use of org.apache.hadoop.hbase.client.TableDescriptorBuilder in project hbase by apache: class TestThriftConnection, method createTable.
private TableDescriptor createTable(Admin admin, String tableName) throws IOException {
  TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(TableName.valueOf(tableName));
  ColumnFamilyDescriptorBuilder familyABuilder = ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA);
  familyABuilder.setMaxVersions(3);
  ColumnFamilyDescriptorBuilder familyBBuilder = ColumnFamilyDescriptorBuilder.newBuilder(FAMILYB);
  familyBBuilder.setMaxVersions(3);
  ColumnFamilyDescriptorBuilder familyCBuilder = ColumnFamilyDescriptorBuilder.newBuilder(FAMILYC);
  familyCBuilder.setMaxVersions(3);
  builder.setColumnFamily(familyABuilder.build());
  builder.setColumnFamily(familyBBuilder.build());
  builder.setColumnFamily(familyCBuilder.build());
  TableDescriptor tableDescriptor = builder.build();
  admin.createTable(tableDescriptor);
  try (Table table = TEST_UTIL.getConnection().getTable(TableName.valueOf(tableName))) {
    Put put = new Put(ROW_1);
    put.addColumn(FAMILYA, QUALIFIER_1, TS_2, VALUE_1);
    table.put(put);
    put = new Put(ROW_2);
    put.addColumn(FAMILYA, QUALIFIER_1, TS_1, VALUE_1);
    put.addColumn(FAMILYA, QUALIFIER_1, TS_2, VALUE_2);
    put.addColumn(FAMILYB, QUALIFIER_2, TS_2, VALUE_2);
    table.put(put);
  }
  return tableDescriptor;
}
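A small readback helper could confirm that the seeded rows are visible through the same Table handle. The sketch below is hypothetical: the method name rowHasValue is illustrative, and the byte[] arguments would be the ROW_/FAMILY/QUALIFIER/VALUE constants used above.

import java.io.IOException;
import java.util.Arrays;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;

// Hypothetical helper: read a single cell back and compare it with the expected value.
private static boolean rowHasValue(Table table, byte[] row, byte[] family, byte[] qualifier,
    byte[] expected) throws IOException {
  Result result = table.get(new Get(row).addColumn(family, qualifier));
  return Arrays.equals(expected, result.getValue(family, qualifier));
}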
Use of org.apache.hadoop.hbase.client.TableDescriptorBuilder in project hbase by apache: class TestThriftConnection, method testThriftAdmin.
private void testThriftAdmin(Connection connection, String namespace, String table) throws Exception {
  try (Admin admin = connection.getAdmin()) {
    // create namespace
    NamespaceDescriptor namespaceDescriptor = NamespaceDescriptor.create(namespace).build();
    namespaceDescriptor.setConfiguration("key1", "value1");
    namespaceDescriptor.setConfiguration("key2", "value2");
    admin.createNamespace(namespaceDescriptor);
    // list namespaces
    NamespaceDescriptor[] namespaceDescriptors = admin.listNamespaceDescriptors();
    boolean found = false;
    for (NamespaceDescriptor nd : namespaceDescriptors) {
      if (nd.getName().equals(namespace)) {
        found = true;
        break;
      }
    }
    assertTrue(found);
    // modify namespace
    namespaceDescriptor.setConfiguration("kye3", "value3");
    admin.modifyNamespace(namespaceDescriptor);
    // get namespace
    NamespaceDescriptor namespaceDescriptorReturned = admin.getNamespaceDescriptor(namespace);
    assertTrue(namespaceDescriptorReturned.getConfiguration().size() == 3);
    // create table
    TableDescriptor tableDescriptor = createTable(admin, table);
    // modify table
    TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableDescriptor);
    builder.setDurability(Durability.ASYNC_WAL);
    admin.modifyTable(builder.build());
    // modify column family
    ColumnFamilyDescriptor familyA = tableDescriptor.getColumnFamily(FAMILYA);
    ColumnFamilyDescriptorBuilder familyABuilder = ColumnFamilyDescriptorBuilder.newBuilder(familyA);
    familyABuilder.setInMemory(true);
    admin.modifyColumnFamily(tableDescriptor.getTableName(), familyABuilder.build());
    // add column family
    ColumnFamilyDescriptorBuilder familyDBuilder = ColumnFamilyDescriptorBuilder.newBuilder(FAMILYD);
    familyDBuilder.setDataBlockEncoding(DataBlockEncoding.PREFIX);
    admin.addColumnFamily(tableDescriptor.getTableName(), familyDBuilder.build());
    // get table descriptor
    TableDescriptor tableDescriptorReturned = admin.getDescriptor(tableDescriptor.getTableName());
    assertTrue(tableDescriptorReturned.getColumnFamilies().length == 4);
    assertTrue(tableDescriptorReturned.getDurability() == Durability.ASYNC_WAL);
    ColumnFamilyDescriptor columnFamilyADescriptor1Returned =
      tableDescriptorReturned.getColumnFamily(FAMILYA);
    assertTrue(columnFamilyADescriptor1Returned.isInMemory() == true);
    // delete column family
    admin.deleteColumnFamily(tableDescriptor.getTableName(), FAMILYA);
    tableDescriptorReturned = admin.getDescriptor(tableDescriptor.getTableName());
    assertTrue(tableDescriptorReturned.getColumnFamilies().length == 3);
    // disable table
    admin.disableTable(tableDescriptor.getTableName());
    assertTrue(admin.isTableDisabled(tableDescriptor.getTableName()));
    // enable table
    admin.enableTable(tableDescriptor.getTableName());
    assertTrue(admin.isTableEnabled(tableDescriptor.getTableName()));
    assertTrue(admin.isTableAvailable(tableDescriptor.getTableName()));
    // truncate table
    admin.disableTable(tableDescriptor.getTableName());
    admin.truncateTable(tableDescriptor.getTableName(), true);
    assertTrue(admin.isTableAvailable(tableDescriptor.getTableName()));
    // delete table
    admin.disableTable(tableDescriptor.getTableName());
    admin.deleteTable(tableDescriptor.getTableName());
    assertFalse(admin.tableExists(tableDescriptor.getTableName()));
    // delete namespace
    admin.deleteNamespace(namespace);
    namespaceDescriptors = admin.listNamespaceDescriptors();
    // only the default and hbase namespaces should remain
    found = false;
    for (NamespaceDescriptor nd : namespaceDescriptors) {
      if (nd.getName().equals(namespace)) {
        found = true;
        break;
      }
    }
    assertTrue(found == false);
  }
}
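The two listNamespaceDescriptors() loops above both reduce to a namespace-existence check; a hypothetical helper for that check might look like the sketch below (the name namespaceExists is illustrative, not part of the HBase test).

import java.io.IOException;
import org.apache.hadoop.hbase.NamespaceDescriptor;
import org.apache.hadoop.hbase.client.Admin;

// Hypothetical helper: scan the namespace descriptors for one with the given name.
private static boolean namespaceExists(Admin admin, String namespace) throws IOException {
  for (NamespaceDescriptor nd : admin.listNamespaceDescriptors()) {
    if (nd.getName().equals(namespace)) {
      return true;
    }
  }
  return false;
}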