Search in sources :

Example 16 with TableDescriptorBuilder

use of org.apache.hadoop.hbase.client.TableDescriptorBuilder in project hbase by apache.

From the class TestNamespace, method createTableInDefaultNamespace.

/**
 * Creates a table without specifying a namespace (so it lands in the default
 * namespace), verifies it is the only listed table, then cleans it up.
 */
@Test
public void createTableInDefaultNamespace() throws Exception {
    // Single column family "cf1"; table named after the running test method.
    ColumnFamilyDescriptor family =
        ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("cf1")).build();
    TableDescriptorBuilder builder =
        TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName()));
    builder.setColumnFamily(family);
    TableDescriptor descriptor = builder.build();
    admin.createTable(descriptor);
    // Exactly one table should be visible after creation.
    assertTrue(admin.listTableDescriptors().size() == 1);
    // Tear down: a table must be disabled before it can be deleted.
    admin.disableTable(descriptor.getTableName());
    admin.deleteTable(descriptor.getTableName());
}
Also used : TableDescriptorBuilder(org.apache.hadoop.hbase.client.TableDescriptorBuilder) ColumnFamilyDescriptor(org.apache.hadoop.hbase.client.ColumnFamilyDescriptor) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor) Test(org.junit.Test)

Example 17 with TableDescriptorBuilder

use of org.apache.hadoop.hbase.client.TableDescriptorBuilder in project hbase by apache.

From the class TestEncryptionKeyRotation, method testCFKeyRotation.

/**
 * Verifies column-family encryption key rotation: creates a table encrypted
 * with an initial key, rotates the key via a schema change, major-compacts,
 * and checks that live store files use the new key while the compacted
 * (pre-rotation) files still carry the old one.
 */
@Test
public void testCFKeyRotation() throws Exception {
    // Create the table schema
    TableDescriptorBuilder tableDescriptorBuilder = TableDescriptorBuilder.newBuilder(TableName.valueOf("default", name.getMethodName()));
    ColumnFamilyDescriptorBuilder columnFamilyDescriptorBuilder = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("cf"));
    String algorithm = conf.get(HConstants.CRYPTO_KEY_ALGORITHM_CONF_KEY, HConstants.CIPHER_AES);
    columnFamilyDescriptorBuilder.setEncryptionType(algorithm);
    columnFamilyDescriptorBuilder.setEncryptionKey(EncryptionUtil.wrapKey(conf, "hbase", initialCFKey));
    tableDescriptorBuilder.setColumnFamily(columnFamilyDescriptorBuilder.build());
    TableDescriptor tableDescriptor = tableDescriptorBuilder.build();
    // Create the table and some on disk files
    createTableAndFlush(tableDescriptor);
    // Verify we have store file(s) with the initial key
    final List<Path> initialPaths = findStorefilePaths(tableDescriptor.getTableName());
    assertTrue(initialPaths.size() > 0);
    for (Path path : initialPaths) {
        assertTrue("Store file " + path + " has incorrect key", Bytes.equals(initialCFKey.getEncoded(), extractHFileKey(path)));
    }
    // Update the schema with a new encryption key
    columnFamilyDescriptorBuilder.setEncryptionKey(EncryptionUtil.wrapKey(conf, conf.get(HConstants.CRYPTO_MASTERKEY_NAME_CONF_KEY, User.getCurrent().getShortName()), secondCFKey));
    TEST_UTIL.getAdmin().modifyColumnFamily(tableDescriptor.getTableName(), columnFamilyDescriptorBuilder.build());
    // Need a predicate for online schema change
    Thread.sleep(5000);
    // And major compact
    TEST_UTIL.getAdmin().majorCompact(tableDescriptor.getTableName());
    // Wait for the major compaction to complete; Waiter.Predicate is a
    // single-method interface, so a lambda replaces the anonymous class.
    TEST_UTIL.waitFor(30000, (Waiter.Predicate<IOException>) () ->
        TEST_UTIL.getAdmin().getCompactionState(tableDescriptor.getTableName()) == CompactionState.NONE);
    // Post-compaction store files must be encrypted with the rotated key.
    List<Path> pathsAfterCompaction = findStorefilePaths(tableDescriptor.getTableName());
    assertTrue(pathsAfterCompaction.size() > 0);
    for (Path path : pathsAfterCompaction) {
        assertTrue("Store file " + path + " has incorrect key", Bytes.equals(secondCFKey.getEncoded(), extractHFileKey(path)));
    }
    // Compacted-away (archived) files were written pre-rotation and should
    // still be readable with the initial key.
    List<Path> compactedPaths = findCompactedStorefilePaths(tableDescriptor.getTableName());
    assertTrue(compactedPaths.size() > 0);
    for (Path path : compactedPaths) {
        assertTrue("Store file " + path + " retains initial key", Bytes.equals(initialCFKey.getEncoded(), extractHFileKey(path)));
    }
}
Also used : Path(org.apache.hadoop.fs.Path) ColumnFamilyDescriptorBuilder(org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder) TableDescriptorBuilder(org.apache.hadoop.hbase.client.TableDescriptorBuilder) IOException(java.io.IOException) Waiter(org.apache.hadoop.hbase.Waiter) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor) Test(org.junit.Test)

Example 18 with TableDescriptorBuilder

use of org.apache.hadoop.hbase.client.TableDescriptorBuilder in project hbase by apache.

From the class TestHRegion, method testStatusSettingToAbortIfAnyExceptionDuringRegionInitilization.

/**
 * Test case to check that the region-initialization task state is set to
 * ABORTED when any exception is thrown during initialization.
 *
 * @throws Exception if the test infrastructure fails unexpectedly
 */
@Test
public void testStatusSettingToAbortIfAnyExceptionDuringRegionInitilization() throws Exception {
    RegionInfo info;
    try {
        FileSystem fs = Mockito.mock(FileSystem.class);
        // Any existence check on the mocked filesystem fails, forcing
        // region initialization to throw. Mockito.anyObject() is deprecated
        // (removed in Mockito 3); use the typed any(Path.class) matcher.
        Mockito.when(fs.exists(Mockito.any(Path.class))).thenThrow(new IOException());
        TableDescriptorBuilder tableDescriptorBuilder = TableDescriptorBuilder.newBuilder(tableName);
        ColumnFamilyDescriptor columnFamilyDescriptor = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("cf")).build();
        tableDescriptorBuilder.setColumnFamily(columnFamilyDescriptor);
        info = RegionInfoBuilder.newBuilder(tableName).build();
        Path path = new Path(dir + "testStatusSettingToAbortIfAnyExceptionDuringRegionInitilization");
        region = HRegion.newHRegion(path, null, fs, CONF, info, tableDescriptorBuilder.build(), null);
        // region initialization throws IOException and set task state to ABORTED.
        region.initialize();
        fail("Region initialization should fail due to IOException");
    } catch (IOException io) {
        // Expected path: find the monitored task for this region and verify
        // its state was flipped to ABORTED by the failed initialization.
        List<MonitoredTask> tasks = TaskMonitor.get().getTasks();
        for (MonitoredTask monitoredTask : tasks) {
            if (!(monitoredTask instanceof MonitoredRPCHandler) && monitoredTask.getDescription().contains(region.toString())) {
                assertTrue("Region state should be ABORTED.", monitoredTask.getState().equals(MonitoredTask.State.ABORTED));
                break;
            }
        }
    }
}
Also used : Path(org.apache.hadoop.fs.Path) MonitoredRPCHandler(org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler) FileSystem(org.apache.hadoop.fs.FileSystem) FaultyFileSystem(org.apache.hadoop.hbase.regionserver.TestHStore.FaultyFileSystem) RegionInfo(org.apache.hadoop.hbase.client.RegionInfo) TableDescriptorBuilder(org.apache.hadoop.hbase.client.TableDescriptorBuilder) FilterList(org.apache.hadoop.hbase.filter.FilterList) ArrayList(java.util.ArrayList) List(java.util.List) InterruptedIOException(java.io.InterruptedIOException) IOException(java.io.IOException) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) ColumnFamilyDescriptor(org.apache.hadoop.hbase.client.ColumnFamilyDescriptor) MonitoredTask(org.apache.hadoop.hbase.monitoring.MonitoredTask) Test(org.junit.Test)

Example 19 with TableDescriptorBuilder

use of org.apache.hadoop.hbase.client.TableDescriptorBuilder in project hbase by apache.

From the class TestThriftConnection, method createTable.

/**
 * Creates a table with three column families (A, B, C), each retaining up to
 * three versions, then seeds two rows of fixture data into family A/B.
 *
 * @param admin     the admin used to create the table
 * @param tableName the name of the table to create
 * @return the descriptor of the created table
 * @throws IOException if table creation or the puts fail
 */
private TableDescriptor createTable(Admin admin, String tableName) throws IOException {
    // Build all three families fluently; each keeps up to 3 cell versions.
    TableDescriptor tableDescriptor = TableDescriptorBuilder.newBuilder(TableName.valueOf(tableName))
        .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYA).setMaxVersions(3).build())
        .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYB).setMaxVersions(3).build())
        .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(FAMILYC).setMaxVersions(3).build())
        .build();
    admin.createTable(tableDescriptor);
    // Seed fixture rows; the table handle is closed by try-with-resources.
    try (Table table = TEST_UTIL.getConnection().getTable(TableName.valueOf(tableName))) {
        Put firstRow = new Put(ROW_1);
        firstRow.addColumn(FAMILYA, QUALIFIER_1, TS_2, VALUE_1);
        table.put(firstRow);
        Put secondRow = new Put(ROW_2);
        secondRow.addColumn(FAMILYA, QUALIFIER_1, TS_1, VALUE_1);
        secondRow.addColumn(FAMILYA, QUALIFIER_1, TS_2, VALUE_2);
        secondRow.addColumn(FAMILYB, QUALIFIER_2, TS_2, VALUE_2);
        table.put(secondRow);
    }
    return tableDescriptor;
}
Also used : Table(org.apache.hadoop.hbase.client.Table) ColumnFamilyDescriptorBuilder(org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder) TableDescriptorBuilder(org.apache.hadoop.hbase.client.TableDescriptorBuilder) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor) Put(org.apache.hadoop.hbase.client.Put)

Example 20 with TableDescriptorBuilder

use of org.apache.hadoop.hbase.client.TableDescriptorBuilder in project hbase by apache.

From the class TestThriftConnection, method testThriftAdmin.

/**
 * Exercises the full admin surface over a Thrift connection: namespace
 * create/modify/get/delete, table create/modify, column-family
 * modify/add/delete, disable/enable, and truncate.
 *
 * @param connection the (Thrift-backed) connection under test
 * @param namespace  name of a namespace to create and later delete
 * @param table      name of a table to create and later delete
 * @throws Exception if any admin operation fails
 */
private void testThriftAdmin(Connection connection, String namespace, String table) throws Exception {
    try (Admin admin = connection.getAdmin()) {
        // create name space
        NamespaceDescriptor namespaceDescriptor = NamespaceDescriptor.create(namespace).build();
        namespaceDescriptor.setConfiguration("key1", "value1");
        namespaceDescriptor.setConfiguration("key2", "value2");
        admin.createNamespace(namespaceDescriptor);
        // list namespaces and confirm the new one is present
        NamespaceDescriptor[] namespaceDescriptors = admin.listNamespaceDescriptors();
        boolean found = false;
        for (NamespaceDescriptor nd : namespaceDescriptors) {
            if (nd.getName().equals(namespace)) {
                found = true;
                break;
            }
        }
        assertTrue(found);
        // modify namespace
        namespaceDescriptor.setConfiguration("kye3", "value3");
        admin.modifyNamespace(namespaceDescriptor);
        // get namespace: all three configuration entries should be visible
        NamespaceDescriptor namespaceDescriptorReturned = admin.getNamespaceDescriptor(namespace);
        assertTrue(namespaceDescriptorReturned.getConfiguration().size() == 3);
        // create table
        TableDescriptor tableDescriptor = createTable(admin, table);
        // modify table
        TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableDescriptor);
        builder.setDurability(Durability.ASYNC_WAL);
        admin.modifyTable(builder.build());
        // modify column family
        ColumnFamilyDescriptor familyA = tableDescriptor.getColumnFamily(FAMILYA);
        ColumnFamilyDescriptorBuilder familyABuilder = ColumnFamilyDescriptorBuilder.newBuilder(familyA);
        familyABuilder.setInMemory(true);
        admin.modifyColumnFamily(tableDescriptor.getTableName(), familyABuilder.build());
        // add column family
        ColumnFamilyDescriptorBuilder familyDBuilder = ColumnFamilyDescriptorBuilder.newBuilder(FAMILYD);
        familyDBuilder.setDataBlockEncoding(DataBlockEncoding.PREFIX);
        admin.addColumnFamily(tableDescriptor.getTableName(), familyDBuilder.build());
        // get table descriptor and verify the modifications took effect
        TableDescriptor tableDescriptorReturned = admin.getDescriptor(tableDescriptor.getTableName());
        assertTrue(tableDescriptorReturned.getColumnFamilies().length == 4);
        assertTrue(tableDescriptorReturned.getDurability() == Durability.ASYNC_WAL);
        ColumnFamilyDescriptor columnFamilyADescriptor1Returned = tableDescriptorReturned.getColumnFamily(FAMILYA);
        // assert the boolean directly instead of comparing with == true
        assertTrue(columnFamilyADescriptor1Returned.isInMemory());
        // delete column family
        admin.deleteColumnFamily(tableDescriptor.getTableName(), FAMILYA);
        tableDescriptorReturned = admin.getDescriptor(tableDescriptor.getTableName());
        assertTrue(tableDescriptorReturned.getColumnFamilies().length == 3);
        // disable table
        admin.disableTable(tableDescriptor.getTableName());
        assertTrue(admin.isTableDisabled(tableDescriptor.getTableName()));
        // enable table
        admin.enableTable(tableDescriptor.getTableName());
        assertTrue(admin.isTableEnabled(tableDescriptor.getTableName()));
        assertTrue(admin.isTableAvailable(tableDescriptor.getTableName()));
        // truncate table
        admin.disableTable(tableDescriptor.getTableName());
        admin.truncateTable(tableDescriptor.getTableName(), true);
        assertTrue(admin.isTableAvailable(tableDescriptor.getTableName()));
        // delete table
        admin.disableTable(tableDescriptor.getTableName());
        admin.deleteTable(tableDescriptor.getTableName());
        assertFalse(admin.tableExists(tableDescriptor.getTableName()));
        // delete namespace
        admin.deleteNamespace(namespace);
        namespaceDescriptors = admin.listNamespaceDescriptors();
        // the deleted namespace must no longer appear in the listing
        found = false;
        for (NamespaceDescriptor nd : namespaceDescriptors) {
            if (nd.getName().equals(namespace)) {
                found = true;
                break;
            }
        }
        // use assertFalse rather than assertTrue(found == false)
        assertFalse(found);
    }
}
Also used : ColumnFamilyDescriptorBuilder(org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder) NamespaceDescriptor(org.apache.hadoop.hbase.NamespaceDescriptor) TableDescriptorBuilder(org.apache.hadoop.hbase.client.TableDescriptorBuilder) Admin(org.apache.hadoop.hbase.client.Admin) ColumnFamilyDescriptor(org.apache.hadoop.hbase.client.ColumnFamilyDescriptor) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor)

Aggregations

TableDescriptorBuilder (org.apache.hadoop.hbase.client.TableDescriptorBuilder)190 ColumnFamilyDescriptor (org.apache.hadoop.hbase.client.ColumnFamilyDescriptor)76 Test (org.junit.Test)68 TableDescriptor (org.apache.hadoop.hbase.client.TableDescriptor)61 ColumnFamilyDescriptorBuilder (org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder)47 RegionInfo (org.apache.hadoop.hbase.client.RegionInfo)39 TableName (org.apache.hadoop.hbase.TableName)34 Path (org.apache.hadoop.fs.Path)31 Admin (org.apache.hadoop.hbase.client.Admin)29 Put (org.apache.hadoop.hbase.client.Put)25 IOException (java.io.IOException)24 Configuration (org.apache.hadoop.conf.Configuration)20 Table (org.apache.hadoop.hbase.client.Table)18 ArrayList (java.util.ArrayList)17 FileSystem (org.apache.hadoop.fs.FileSystem)15 HRegion (org.apache.hadoop.hbase.regionserver.HRegion)15 Before (org.junit.Before)12 Cell (org.apache.hadoop.hbase.Cell)11 NamespaceDescriptor (org.apache.hadoop.hbase.NamespaceDescriptor)10 HashMap (java.util.HashMap)9