Example 71 with ColumnFamilyDescriptor

use of org.apache.hadoop.hbase.client.ColumnFamilyDescriptor in project hbase by apache.

the class VisibilityUtils method createVisibilityLabelFilter.

public static Filter createVisibilityLabelFilter(Region region, Authorizations authorizations) throws IOException {
    // Map each column family name to its configured max versions so the filter
    // can stop emitting cells once a family's version limit is reached.
    Map<ByteRange, Integer> cfVsMaxVersions = new HashMap<>();
    for (ColumnFamilyDescriptor hcd : region.getTableDescriptor().getColumnFamilies()) {
        cfVsMaxVersions.put(new SimpleMutableByteRange(hcd.getName()), hcd.getMaxVersions());
    }
    // Resolve the pluggable label service and wrap its expression evaluator in a filter.
    VisibilityLabelService vls = VisibilityLabelServiceManager.getInstance().getVisibilityLabelService();
    return new VisibilityLabelFilter(vls.getVisibilityExpEvaluator(authorizations), cfVsMaxVersions);
}
Also used : ByteRange(org.apache.hadoop.hbase.util.ByteRange) SimpleMutableByteRange(org.apache.hadoop.hbase.util.SimpleMutableByteRange) HashMap(java.util.HashMap) Filter(org.apache.hadoop.hbase.filter.Filter) ColumnFamilyDescriptor(org.apache.hadoop.hbase.client.ColumnFamilyDescriptor)
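The Authorizations evaluated here originate on the client side of the scan. A minimal client-side sketch, assuming a table "t1" and labels "secret" and "topsecret" that are placeholders, not part of the example above:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.security.visibility.Authorizations;

public class VisibilityScanSketch {
    public static void main(String[] args) throws IOException {
        Configuration conf = HBaseConfiguration.create();
        try (Connection conn = ConnectionFactory.createConnection(conf);
             Table table = conn.getTable(TableName.valueOf("t1"))) { // "t1" is a placeholder
            Scan scan = new Scan();
            // Labels the scan runs with; the server evaluates each cell's
            // visibility expression against these via the filter built above.
            scan.setAuthorizations(new Authorizations("secret", "topsecret"));
            try (ResultScanner scanner = table.getScanner(scan)) {
                for (Result r : scanner) {
                    System.out.println(r);
                }
            }
        }
    }
}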

Example 72 with ColumnFamilyDescriptor

use of org.apache.hadoop.hbase.client.ColumnFamilyDescriptor in project hbase by apache.

the class TestModifyTableProcedure method testConcurrentDeleteColumnFamily.

@Test
public void testConcurrentDeleteColumnFamily() throws IOException, InterruptedException {
    final TableName tableName = TableName.valueOf(name.getMethodName());
    TableDescriptorBuilder tableDescriptorBuilder = TableDescriptorBuilder.newBuilder(tableName);
    ColumnFamilyDescriptor columnFamilyDescriptor = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(column_Family1)).build();
    tableDescriptorBuilder.setColumnFamily(columnFamilyDescriptor);
    columnFamilyDescriptor = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(column_Family2)).build();
    tableDescriptorBuilder.setColumnFamily(columnFamilyDescriptor);
    columnFamilyDescriptor = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(column_Family3)).build();
    tableDescriptorBuilder.setColumnFamily(columnFamilyDescriptor);
    UTIL.getAdmin().createTable(tableDescriptorBuilder.build());
    class ConcurrentCreateDeleteTable extends Thread {

        TableName tableName = null;

        String columnFamily = null;

        boolean exception;

        public ConcurrentCreateDeleteTable(TableName tableName, String columnFamily) {
            this.tableName = tableName;
            this.columnFamily = columnFamily;
            this.exception = false;
        }

        @Override
        public void run() {
            try {
                // Bytes.toBytes encodes as UTF-8; String#getBytes() would depend
                // on the platform default charset.
                UTIL.getAdmin().deleteColumnFamily(tableName, Bytes.toBytes(columnFamily));
            } catch (Exception e) {
                if (e instanceof ConcurrentTableModificationException) {
                    this.exception = true;
                }
            }
        }
    }
    ConcurrentCreateDeleteTable t1 = new ConcurrentCreateDeleteTable(tableName, column_Family2);
    ConcurrentCreateDeleteTable t2 = new ConcurrentCreateDeleteTable(tableName, column_Family3);
    t1.start();
    t2.start();
    t1.join();
    t2.join();
    int noOfColumnFamilies = UTIL.getAdmin().getDescriptor(tableName).getColumnFamilies().length;
    assertTrue("Expected ConcurrentTableModificationException.", ((t1.exception || t2.exception) && noOfColumnFamilies == 2) || noOfColumnFamilies == 1);
}
Also used : TableName(org.apache.hadoop.hbase.TableName) ConcurrentTableModificationException(org.apache.hadoop.hbase.ConcurrentTableModificationException) TableDescriptorBuilder(org.apache.hadoop.hbase.client.TableDescriptorBuilder) ColumnFamilyDescriptor(org.apache.hadoop.hbase.client.ColumnFamilyDescriptor) InvalidFamilyOperationException(org.apache.hadoop.hbase.InvalidFamilyOperationException) DoNotRetryIOException(org.apache.hadoop.hbase.DoNotRetryIOException) IOException(java.io.IOException) Test(org.junit.Test)
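Outside the deliberate race, the same Admin call is a one-liner. A minimal sketch; the table name "t" and family "cf2" are placeholders:

import java.io.IOException;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.util.Bytes;

public class DeleteFamilySketch {
    public static void main(String[] args) throws IOException {
        try (Connection conn = ConnectionFactory.createConnection(HBaseConfiguration.create());
             Admin admin = conn.getAdmin()) {
            // Bytes.toBytes encodes as UTF-8, sidestepping the platform-default
            // charset pitfall of String#getBytes().
            admin.deleteColumnFamily(TableName.valueOf("t"), Bytes.toBytes("cf2"));
        }
    }
}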

Example 73 with ColumnFamilyDescriptor

use of org.apache.hadoop.hbase.client.ColumnFamilyDescriptor in project hbase by apache.

the class TestModifyTableProcedure method testRecoveryAndDoubleExecutionOnline.

@Test
public void testRecoveryAndDoubleExecutionOnline() throws Exception {
    final TableName tableName = TableName.valueOf(name.getMethodName());
    final String cf2 = "cf2";
    final String cf3 = "cf3";
    final ProcedureExecutor<MasterProcedureEnv> procExec = getMasterProcedureExecutor();
    // create the table
    RegionInfo[] regions = MasterProcedureTestingUtility.createTable(procExec, tableName, null, "cf1", cf3);
    ProcedureTestingUtility.setKillAndToggleBeforeStoreUpdate(procExec, true);
    // Modify multiple properties of the table.
    TableDescriptorBuilder tableDescriptorBuilder = TableDescriptorBuilder.newBuilder(UTIL.getAdmin().getDescriptor(tableName));
    ColumnFamilyDescriptor columnFamilyDescriptor = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(cf2)).build();
    boolean newCompactionEnableOption = !tableDescriptorBuilder.build().isCompactionEnabled();
    tableDescriptorBuilder.setCompactionEnabled(newCompactionEnableOption);
    tableDescriptorBuilder.setColumnFamily(columnFamilyDescriptor);
    tableDescriptorBuilder.removeColumnFamily(Bytes.toBytes(cf3));
    // Start the Modify procedure && kill the executor
    long procId = procExec.submitProcedure(new ModifyTableProcedure(procExec.getEnvironment(), tableDescriptorBuilder.build()));
    // Restart the executor and execute the step twice
    MasterProcedureTestingUtility.testRecoveryAndDoubleExecution(procExec, procId);
    // Validate descriptor
    TableDescriptor currentHtd = UTIL.getAdmin().getDescriptor(tableName);
    assertEquals(newCompactionEnableOption, currentHtd.isCompactionEnabled());
    assertEquals(2, currentHtd.getColumnFamilyNames().size());
    assertTrue(currentHtd.hasColumnFamily(Bytes.toBytes(cf2)));
    assertFalse(currentHtd.hasColumnFamily(Bytes.toBytes(cf3)));
    // cf2 should be added, cf3 should be removed
    MasterProcedureTestingUtility.validateTableCreation(UTIL.getHBaseCluster().getMaster(), tableName, regions, "cf1", cf2);
}
Also used : TableName(org.apache.hadoop.hbase.TableName) RegionInfo(org.apache.hadoop.hbase.client.RegionInfo) TableDescriptorBuilder(org.apache.hadoop.hbase.client.TableDescriptorBuilder) ColumnFamilyDescriptor(org.apache.hadoop.hbase.client.ColumnFamilyDescriptor) TableDescriptor(org.apache.hadoop.hbase.client.TableDescriptor) Test(org.junit.Test)
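Submitting ModifyTableProcedure directly is test plumbing; ordinary clients reach the same master-side procedure through Admin#modifyTable. A hedged sketch of the equivalent change, with placeholder family names:

import java.io.IOException;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;

public class ModifyTableSketch {
    static void addCf2DropCf3(Admin admin, TableName tableName) throws IOException {
        TableDescriptorBuilder builder =
            TableDescriptorBuilder.newBuilder(admin.getDescriptor(tableName));
        // add cf2
        builder.setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("cf2")).build());
        // drop cf3
        builder.removeColumnFamily(Bytes.toBytes("cf3"));
        // synchronous: returns once the master-side modify procedure completes
        admin.modifyTable(builder.build());
    }
}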

Example 74 with ColumnFamilyDescriptor

use of org.apache.hadoop.hbase.client.ColumnFamilyDescriptor in project hbase by apache.

the class TestHStore method testCreateWriter.

/**
 * Verify that compression and data block encoding are respected by the
 * createWriter method, used on store flush.
 */
@Test
public void testCreateWriter() throws Exception {
    Configuration conf = HBaseConfiguration.create();
    FileSystem fs = FileSystem.get(conf);
    ColumnFamilyDescriptor hcd = ColumnFamilyDescriptorBuilder.newBuilder(family).setCompressionType(Compression.Algorithm.GZ).setDataBlockEncoding(DataBlockEncoding.DIFF).build();
    init(name.getMethodName(), conf, hcd);
    // Test createWriter
    StoreFileWriter writer = store.getStoreEngine().createWriter(
        CreateStoreFileWriterParams.create()
            .maxKeyCount(4)
            .compression(hcd.getCompressionType())
            .isCompaction(false)
            .includeMVCCReadpoint(true)
            .includesTag(false)
            .shouldDropBehind(false));
    Path path = writer.getPath();
    writer.append(new KeyValue(row, family, qf1, Bytes.toBytes(1)));
    writer.append(new KeyValue(row, family, qf2, Bytes.toBytes(2)));
    writer.append(new KeyValue(row2, family, qf1, Bytes.toBytes(3)));
    writer.append(new KeyValue(row2, family, qf2, Bytes.toBytes(4)));
    writer.close();
    // Verify that compression and encoding settings are respected
    HFile.Reader reader = HFile.createReader(fs, path, new CacheConfig(conf), true, conf);
    assertEquals(hcd.getCompressionType(), reader.getTrailer().getCompressionCodec());
    assertEquals(hcd.getDataBlockEncoding(), reader.getDataBlockEncoding());
    reader.close();
}
Also used : Path(org.apache.hadoop.fs.Path) KeyValue(org.apache.hadoop.hbase.KeyValue) Configuration(org.apache.hadoop.conf.Configuration) CompactionConfiguration(org.apache.hadoop.hbase.regionserver.compactions.CompactionConfiguration) HBaseConfiguration(org.apache.hadoop.hbase.HBaseConfiguration) FileSystem(org.apache.hadoop.fs.FileSystem) FilterFileSystem(org.apache.hadoop.fs.FilterFileSystem) LocalFileSystem(org.apache.hadoop.fs.LocalFileSystem) ColumnFamilyDescriptor(org.apache.hadoop.hbase.client.ColumnFamilyDescriptor) HFile(org.apache.hadoop.hbase.io.hfile.HFile) CacheConfig(org.apache.hadoop.hbase.io.hfile.CacheConfig) Test(org.junit.Test)
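Compression and data block encoding are two of several per-family options on ColumnFamilyDescriptorBuilder. A short sketch of commonly paired settings; the family name and the values are illustrative, not tuning advice:

import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.io.compress.Compression;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.hbase.util.Bytes;

public class FamilyOptionsSketch {
    static ColumnFamilyDescriptor build() {
        return ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("f"))
            // compress store file blocks
            .setCompressionType(Compression.Algorithm.GZ)
            // delta-encode keys within data blocks
            .setDataBlockEncoding(DataBlockEncoding.DIFF)
            // row-level bloom filter on store files
            .setBloomFilterType(BloomType.ROW)
            // retain up to three versions per cell
            .setMaxVersions(3)
            // expire cells after one day (TTL is in seconds)
            .setTimeToLive(86400)
            .build();
    }
}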

Example 75 with ColumnFamilyDescriptor

use of org.apache.hadoop.hbase.client.ColumnFamilyDescriptor in project hbase by apache.

the class TestHRegionServerBulkLoad method setupTable.

/**
 * Creates a table with the given name and the specified number of column
 * families, if the table does not already exist.
 */
public void setupTable(TableName table, int cfs) throws IOException {
    try {
        LOG.info("Creating table " + table);
        TableDescriptorBuilder tableDescriptorBuilder = TableDescriptorBuilder.newBuilder(table);
        tableDescriptorBuilder.setCoprocessor(MyObserver.class.getName());
        MyObserver.sleepDuration = this.sleepDuration;
        for (int i = 0; i < cfs; i++) { // honor the requested number of families
            ColumnFamilyDescriptor columnFamilyDescriptor = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(family(i))).build();
            tableDescriptorBuilder.setColumnFamily(columnFamilyDescriptor);
        }
        UTIL.getAdmin().createTable(tableDescriptorBuilder.build());
    } catch (TableExistsException tee) {
        LOG.info("Table " + table + " already exists");
    }
}
Also used : TableExistsException(org.apache.hadoop.hbase.TableExistsException) TableDescriptorBuilder(org.apache.hadoop.hbase.client.TableDescriptorBuilder) ColumnFamilyDescriptor(org.apache.hadoop.hbase.client.ColumnFamilyDescriptor)
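An alternative sketch probes with Admin#tableExists instead of catching TableExistsException. The check-then-create window is still racy under concurrent creators, which is why the version above relies on the exception:

import java.io.IOException;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.TableDescriptor;

public class IdempotentCreateSketch {
    static void createIfAbsent(Admin admin, TableDescriptor desc) throws IOException {
        // Probe first; a competing creator can still slip in between the
        // check and the create, so this is best-effort only.
        if (!admin.tableExists(desc.getTableName())) {
            admin.createTable(desc);
        }
    }
}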

Aggregations

ColumnFamilyDescriptor (org.apache.hadoop.hbase.client.ColumnFamilyDescriptor): 199 uses
TableDescriptor (org.apache.hadoop.hbase.client.TableDescriptor): 95 uses
Test (org.junit.Test): 92 uses
TableDescriptorBuilder (org.apache.hadoop.hbase.client.TableDescriptorBuilder): 78 uses
IOException (java.io.IOException): 44 uses
TableName (org.apache.hadoop.hbase.TableName): 44 uses
RegionInfo (org.apache.hadoop.hbase.client.RegionInfo): 42 uses
Path (org.apache.hadoop.fs.Path): 41 uses
Admin (org.apache.hadoop.hbase.client.Admin): 36 uses
Configuration (org.apache.hadoop.conf.Configuration): 34 uses
ArrayList (java.util.ArrayList): 32 uses
Put (org.apache.hadoop.hbase.client.Put): 32 uses
FileSystem (org.apache.hadoop.fs.FileSystem): 28 uses
HRegion (org.apache.hadoop.hbase.regionserver.HRegion): 24 uses
HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration): 22 uses
Get (org.apache.hadoop.hbase.client.Get): 20 uses
Result (org.apache.hadoop.hbase.client.Result): 19 uses
ColumnFamilyDescriptorBuilder (org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder): 17 uses
Scan (org.apache.hadoop.hbase.client.Scan): 17 uses
Table (org.apache.hadoop.hbase.client.Table): 17 uses