
Example 21 with ColumnFamilyDescriptor

Use of org.apache.hadoop.hbase.client.ColumnFamilyDescriptor in project hbase by apache.

The class TestColumnFamilyDescriptorDefaultVersions, method testHColumnDescriptorCachedMaxVersions.

@Test
public void testHColumnDescriptorCachedMaxVersions() throws Exception {
    ColumnFamilyDescriptor familyDescriptor = ColumnFamilyDescriptorBuilder.newBuilder(FAMILY).setMaxVersions(5).build();
    // Verify the max version
    assertEquals(5, familyDescriptor.getMaxVersions());
    // Modify the max version via the raw VERSIONS value
    familyDescriptor = ColumnFamilyDescriptorBuilder.newBuilder(familyDescriptor).setValue(Bytes.toBytes(HConstants.VERSIONS), Bytes.toBytes("8")).build();
    // Verify the max version
    assertEquals(8, familyDescriptor.getMaxVersions());
}
Also used : ColumnFamilyDescriptor(org.apache.hadoop.hbase.client.ColumnFamilyDescriptor) Test(org.junit.Test)
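ColumnFamilyDescriptor instances are immutable, so the test above derives a modified copy with ColumnFamilyDescriptorBuilder.newBuilder(existing) rather than mutating the descriptor in place. A minimal sketch of that copy-and-modify pattern, assuming a hypothetical family name "cf" (not taken from the test):

ColumnFamilyDescriptor original = ColumnFamilyDescriptorBuilder
    .newBuilder(Bytes.toBytes("cf"))   // hypothetical family name
    .setMaxVersions(3)
    .build();
// Copy the existing descriptor and override one setting; 'original' is left unchanged.
ColumnFamilyDescriptor updated = ColumnFamilyDescriptorBuilder
    .newBuilder(original)
    .setMaxVersions(10)
    .build();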

Example 22 with ColumnFamilyDescriptor

Use of org.apache.hadoop.hbase.client.ColumnFamilyDescriptor in project hbase by apache.

The class TestColumnFamilyDescriptorDefaultVersions, method verifyHColumnDescriptor.

private void verifyHColumnDescriptor(int expected, final TableName tableName, final byte[]... families) throws IOException {
    Admin admin = TEST_UTIL.getAdmin();
    // Verify descriptor from master
    TableDescriptor htd = admin.getDescriptor(tableName);
    ColumnFamilyDescriptor[] hcds = htd.getColumnFamilies();
    verifyColumnFamilyDescriptor(expected, hcds, tableName, families);
    // Verify descriptor from HDFS
    MasterFileSystem mfs = TEST_UTIL.getMiniHBaseCluster().getMaster().getMasterFileSystem();
    Path tableDir = CommonFSUtils.getTableDir(mfs.getRootDir(), tableName);
    TableDescriptor td = FSTableDescriptors.getTableDescriptorFromFs(mfs.getFileSystem(), tableDir);
    hcds = td.getColumnFamilies();
    verifyColumnFamilyDescriptor(expected, hcds, tableName, families);
}
Also used : MasterFileSystem(org.apache.hadoop.hbase.master.MasterFileSystem) Path(org.apache.hadoop.fs.Path) Admin(org.apache.hadoop.hbase.client.Admin) ColumnFamilyDescriptor(org.apache.hadoop.hbase.client.ColumnFamilyDescriptor) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor)
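The helper above checks the same column families twice: once from the descriptor the master serves and once from the descriptor persisted on HDFS. For the common client-side case, listing families only needs Admin.getDescriptor. A short sketch, assuming an open Connection named 'connection' and a hypothetical table name:

Admin admin = connection.getAdmin();
TableDescriptor td = admin.getDescriptor(TableName.valueOf("example_table"));  // hypothetical table name
for (ColumnFamilyDescriptor cfd : td.getColumnFamilies()) {
    // Print each family name together with its configured max versions.
    System.out.println(cfd.getNameAsString() + " maxVersions=" + cfd.getMaxVersions());
}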

Example 23 with ColumnFamilyDescriptor

Use of org.apache.hadoop.hbase.client.ColumnFamilyDescriptor in project hbase by apache.

The class TestNamespace, method createTableInDefaultNamespace.

@Test
public void createTableInDefaultNamespace() throws Exception {
    TableDescriptorBuilder tableDescriptorBuilder = TableDescriptorBuilder.newBuilder(TableName.valueOf(name.getMethodName()));
    ColumnFamilyDescriptor columnFamilyDescriptor = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("cf1")).build();
    tableDescriptorBuilder.setColumnFamily(columnFamilyDescriptor);
    TableDescriptor tableDescriptor = tableDescriptorBuilder.build();
    admin.createTable(tableDescriptor);
    assertTrue(admin.listTableDescriptors().size() == 1);
    admin.disableTable(tableDescriptor.getTableName());
    admin.deleteTable(tableDescriptor.getTableName());
}
Also used : TableDescriptorBuilder(org.apache.hadoop.hbase.client.TableDescriptorBuilder) ColumnFamilyDescriptor(org.apache.hadoop.hbase.client.ColumnFamilyDescriptor) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor) Test(org.junit.Test)
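The test builds the table descriptor in three explicit steps; because TableDescriptorBuilder.setColumnFamily returns the builder, the same construction can be written fluently. A minimal sketch, assuming an existing Admin instance named 'admin' and a hypothetical table name:

TableDescriptor td = TableDescriptorBuilder
    .newBuilder(TableName.valueOf("example_table"))          // hypothetical table name
    .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf1"))
    .build();
admin.createTable(td);
// Clean-up mirrors the test: disable first, then delete.
admin.disableTable(td.getTableName());
admin.deleteTable(td.getTableName());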

Example 24 with ColumnFamilyDescriptor

Use of org.apache.hadoop.hbase.client.ColumnFamilyDescriptor in project hbase by apache.

The class TestEncryptionKeyRotation, method createTableAndFlush.

private void createTableAndFlush(TableDescriptor tableDescriptor) throws Exception {
    ColumnFamilyDescriptor cfd = tableDescriptor.getColumnFamilies()[0];
    // Create the test table
    TEST_UTIL.getAdmin().createTable(tableDescriptor);
    TEST_UTIL.waitTableAvailable(tableDescriptor.getTableName(), 5000);
    // Create a store file
    Table table = TEST_UTIL.getConnection().getTable(tableDescriptor.getTableName());
    try {
        table.put(new Put(Bytes.toBytes("testrow")).addColumn(cfd.getName(), Bytes.toBytes("q"), Bytes.toBytes("value")));
    } finally {
        table.close();
    }
    TEST_UTIL.getAdmin().flush(tableDescriptor.getTableName());
}
Also used : Table(org.apache.hadoop.hbase.client.Table) ColumnFamilyDescriptor(org.apache.hadoop.hbase.client.ColumnFamilyDescriptor) Put(org.apache.hadoop.hbase.client.Put)
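The example closes the Table in a finally block; since Table is Closeable, try-with-resources does the same thing more compactly. A sketch under the assumption that 'connection' is an open Connection and the table and family names are hypothetical:

try (Table table = connection.getTable(TableName.valueOf("example_table"))) {
    // Write one cell so that the subsequent flush has something to persist.
    table.put(new Put(Bytes.toBytes("row1"))
        .addColumn(Bytes.toBytes("cf"), Bytes.toBytes("q"), Bytes.toBytes("value")));
}
connection.getAdmin().flush(TableName.valueOf("example_table"));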

Example 25 with ColumnFamilyDescriptor

Use of org.apache.hadoop.hbase.client.ColumnFamilyDescriptor in project hbase by apache.

The class TestHRegion, method testStatusSettingToAbortIfAnyExceptionDuringRegionInitilization.

/**
 * Test case to check whether the state of the region initialization task is set to ABORTED
 * when an exception occurs during initialization.
 *
 * @throws Exception
 */
@Test
public void testStatusSettingToAbortIfAnyExceptionDuringRegionInitilization() throws Exception {
    RegionInfo info;
    try {
        FileSystem fs = Mockito.mock(FileSystem.class);
        Mockito.when(fs.exists((Path) Mockito.anyObject())).thenThrow(new IOException());
        TableDescriptorBuilder tableDescriptorBuilder = TableDescriptorBuilder.newBuilder(tableName);
        ColumnFamilyDescriptor columnFamilyDescriptor = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("cf")).build();
        tableDescriptorBuilder.setColumnFamily(columnFamilyDescriptor);
        info = RegionInfoBuilder.newBuilder(tableName).build();
        Path path = new Path(dir + "testStatusSettingToAbortIfAnyExceptionDuringRegionInitilization");
        region = HRegion.newHRegion(path, null, fs, CONF, info, tableDescriptorBuilder.build(), null);
        // Region initialization throws IOException and sets the task state to ABORTED.
        region.initialize();
        fail("Region initialization should fail due to IOException");
    } catch (IOException io) {
        List<MonitoredTask> tasks = TaskMonitor.get().getTasks();
        for (MonitoredTask monitoredTask : tasks) {
            if (!(monitoredTask instanceof MonitoredRPCHandler) && monitoredTask.getDescription().contains(region.toString())) {
                assertTrue("Region state should be ABORTED.", monitoredTask.getState().equals(MonitoredTask.State.ABORTED));
                break;
            }
        }
    }
}
Also used : Path(org.apache.hadoop.fs.Path) MonitoredRPCHandler(org.apache.hadoop.hbase.monitoring.MonitoredRPCHandler) FileSystem(org.apache.hadoop.fs.FileSystem) FaultyFileSystem(org.apache.hadoop.hbase.regionserver.TestHStore.FaultyFileSystem) RegionInfo(org.apache.hadoop.hbase.client.RegionInfo) TableDescriptorBuilder(org.apache.hadoop.hbase.client.TableDescriptorBuilder) FilterList(org.apache.hadoop.hbase.filter.FilterList) ArrayList(java.util.ArrayList) List(java.util.List) InterruptedIOException(java.io.InterruptedIOException) IOException(java.io.IOException) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) ColumnFamilyDescriptor(org.apache.hadoop.hbase.client.ColumnFamilyDescriptor) MonitoredTask(org.apache.hadoop.hbase.monitoring.MonitoredTask) Test(org.junit.Test)
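The test forces initialization to fail by mocking the FileSystem so that every exists() call throws. A minimal sketch of that mocking step, using Mockito.any instead of the deprecated anyObject (the exception message is made up for illustration):

FileSystem fs = Mockito.mock(FileSystem.class);
// Any existence check against this file system fails, which aborts region initialization.
Mockito.when(fs.exists(Mockito.any(Path.class))).thenThrow(new IOException("simulated filesystem failure"));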

Aggregations

ColumnFamilyDescriptor (org.apache.hadoop.hbase.client.ColumnFamilyDescriptor): 199
TableDescriptor (org.apache.hadoop.hbase.client.TableDescriptor): 95
Test (org.junit.Test): 92
TableDescriptorBuilder (org.apache.hadoop.hbase.client.TableDescriptorBuilder): 78
IOException (java.io.IOException): 44
TableName (org.apache.hadoop.hbase.TableName): 44
RegionInfo (org.apache.hadoop.hbase.client.RegionInfo): 42
Path (org.apache.hadoop.fs.Path): 41
Admin (org.apache.hadoop.hbase.client.Admin): 36
Configuration (org.apache.hadoop.conf.Configuration): 34
ArrayList (java.util.ArrayList): 32
Put (org.apache.hadoop.hbase.client.Put): 32
FileSystem (org.apache.hadoop.fs.FileSystem): 28
HRegion (org.apache.hadoop.hbase.regionserver.HRegion): 24
HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration): 22
Get (org.apache.hadoop.hbase.client.Get): 20
Result (org.apache.hadoop.hbase.client.Result): 19
ColumnFamilyDescriptorBuilder (org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder): 17
Scan (org.apache.hadoop.hbase.client.Scan): 17
Table (org.apache.hadoop.hbase.client.Table): 17