Use of org.apache.hadoop.hbase.client.ColumnFamilyDescriptor in project hbase by apache.
From the class VisibilityUtils, method createVisibilityLabelFilter.
public static Filter createVisibilityLabelFilter(Region region, Authorizations authorizations)
    throws IOException {
  Map<ByteRange, Integer> cfVsMaxVersions = new HashMap<>();
  for (ColumnFamilyDescriptor hcd : region.getTableDescriptor().getColumnFamilies()) {
    cfVsMaxVersions.put(new SimpleMutableByteRange(hcd.getName()), hcd.getMaxVersions());
  }
  VisibilityLabelService vls =
      VisibilityLabelServiceManager.getInstance().getVisibilityLabelService();
  Filter visibilityLabelFilter =
      new VisibilityLabelFilter(vls.getVisibilityExpEvaluator(authorizations), cfVsMaxVersions);
  return visibilityLabelFilter;
}
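A short usage sketch: the filter built above is typically attached to a Scan so that cells whose visibility expressions are not satisfied by the caller's Authorizations are filtered out. The `region` and `authorizations` variables are assumed to be in scope (for example, inside a coprocessor hook); this is an illustration, not code from the project.

// Hedged sketch: wire the visibility filter into a scan over this region.
Scan scan = new Scan();
scan.setFilter(VisibilityUtils.createVisibilityLabelFilter(region, authorizations));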
Use of org.apache.hadoop.hbase.client.ColumnFamilyDescriptor in project hbase by apache.
From the class TestModifyTableProcedure, method testConcurrentDeleteColumnFamily.
@Test
public void testConcurrentDeleteColumnFamily() throws IOException, InterruptedException {
  final TableName tableName = TableName.valueOf(name.getMethodName());
  // Create a table with three column families.
  TableDescriptorBuilder tableDescriptorBuilder = TableDescriptorBuilder.newBuilder(tableName);
  ColumnFamilyDescriptor columnFamilyDescriptor =
      ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(column_Family1)).build();
  tableDescriptorBuilder.setColumnFamily(columnFamilyDescriptor);
  columnFamilyDescriptor =
      ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(column_Family2)).build();
  tableDescriptorBuilder.setColumnFamily(columnFamilyDescriptor);
  columnFamilyDescriptor =
      ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(column_Family3)).build();
  tableDescriptorBuilder.setColumnFamily(columnFamilyDescriptor);
  UTIL.getAdmin().createTable(tableDescriptorBuilder.build());

  class ConcurrentCreateDeleteTable extends Thread {
    TableName tableName = null;
    String columnFamily = null;
    boolean exception;

    public ConcurrentCreateDeleteTable(TableName tableName, String columnFamily) {
      this.tableName = tableName;
      this.columnFamily = columnFamily;
      this.exception = false;
    }

    @Override
    public void run() {
      try {
        UTIL.getAdmin().deleteColumnFamily(tableName, columnFamily.getBytes());
      } catch (Exception e) {
        if (e.getClass().equals(ConcurrentTableModificationException.class)) {
          this.exception = true;
        }
      }
    }
  }

  ConcurrentCreateDeleteTable t1 = new ConcurrentCreateDeleteTable(tableName, column_Family2);
  ConcurrentCreateDeleteTable t2 = new ConcurrentCreateDeleteTable(tableName, column_Family3);
  t1.start();
  t2.start();
  t1.join();
  t2.join();
  int noOfColumnFamilies = UTIL.getAdmin().getDescriptor(tableName).getColumnFamilies().length;
  // Either one delete lost the race (exception thrown, two families remain) or both
  // deletes ran to completion sequentially (only column_Family1 remains).
  assertTrue("Expected ConcurrentTableModificationException.",
      ((t1.exception || t2.exception) && noOfColumnFamilies == 2) || noOfColumnFamilies == 1);
}
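The column_Family1/column_Family2/column_Family3 constants are fields of the enclosing test class and are not shown in this excerpt; a minimal sketch of what they presumably look like (the concrete values are assumptions):

// Assumed field definitions in TestModifyTableProcedure; the values are illustrative.
private static final String column_Family1 = "cf1";
private static final String column_Family2 = "cf2";
private static final String column_Family3 = "cf3";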
Use of org.apache.hadoop.hbase.client.ColumnFamilyDescriptor in project hbase by apache.
From the class TestModifyTableProcedure, method testRecoveryAndDoubleExecutionOnline.
@Test
public void testRecoveryAndDoubleExecutionOnline() throws Exception {
  final TableName tableName = TableName.valueOf(name.getMethodName());
  final String cf2 = "cf2";
  final String cf3 = "cf3";
  final ProcedureExecutor<MasterProcedureEnv> procExec = getMasterProcedureExecutor();
  // Create the table.
  RegionInfo[] regions =
      MasterProcedureTestingUtility.createTable(procExec, tableName, null, "cf1", cf3);
  ProcedureTestingUtility.setKillAndToggleBeforeStoreUpdate(procExec, true);
  // Modify multiple properties of the table: toggle compaction, add cf2, drop cf3.
  TableDescriptorBuilder tableDescriptorBuilder =
      TableDescriptorBuilder.newBuilder(UTIL.getAdmin().getDescriptor(tableName));
  ColumnFamilyDescriptor columnFamilyDescriptor =
      ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(cf2)).build();
  boolean newCompactionEnableOption = !tableDescriptorBuilder.build().isCompactionEnabled();
  tableDescriptorBuilder.setCompactionEnabled(newCompactionEnableOption);
  tableDescriptorBuilder.setColumnFamily(columnFamilyDescriptor);
  tableDescriptorBuilder.removeColumnFamily(Bytes.toBytes(cf3));
  // Start the Modify procedure and kill the executor.
  long procId = procExec.submitProcedure(
      new ModifyTableProcedure(procExec.getEnvironment(), tableDescriptorBuilder.build()));
  // Restart the executor and execute each step twice.
  MasterProcedureTestingUtility.testRecoveryAndDoubleExecution(procExec, procId);
  // Validate the descriptor: cf2 should be added and cf3 removed.
  TableDescriptor currentHtd = UTIL.getAdmin().getDescriptor(tableName);
  assertEquals(newCompactionEnableOption, currentHtd.isCompactionEnabled());
  assertEquals(2, currentHtd.getColumnFamilyNames().size());
  assertTrue(currentHtd.hasColumnFamily(Bytes.toBytes(cf2)));
  assertFalse(currentHtd.hasColumnFamily(Bytes.toBytes(cf3)));
  MasterProcedureTestingUtility.validateTableCreation(
      UTIL.getHBaseCluster().getMaster(), tableName, regions, "cf1", cf2);
}
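Tests that enable setKillAndToggleBeforeStoreUpdate generally switch it back off once the recovery run completes, so that later tests see a normally behaving executor. A minimal cleanup sketch, assuming it lives in a shared tearDown that this excerpt omits:

// Assumed cleanup step: restore normal (non-killing) procedure execution.
ProcedureTestingUtility.setKillAndToggleBeforeStoreUpdate(procExec, false);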
Use of org.apache.hadoop.hbase.client.ColumnFamilyDescriptor in project hbase by apache.
From the class TestHStore, method testCreateWriter.
/**
 * Verify that compression and data block encoding are respected by the
 * createWriter method, used on store flush.
 */
@Test
public void testCreateWriter() throws Exception {
  Configuration conf = HBaseConfiguration.create();
  FileSystem fs = FileSystem.get(conf);
  ColumnFamilyDescriptor hcd = ColumnFamilyDescriptorBuilder.newBuilder(family)
      .setCompressionType(Compression.Algorithm.GZ)
      .setDataBlockEncoding(DataBlockEncoding.DIFF)
      .build();
  init(name.getMethodName(), conf, hcd);
  // Test createWriter.
  StoreFileWriter writer = store.getStoreEngine().createWriter(
      CreateStoreFileWriterParams.create()
          .maxKeyCount(4)
          .compression(hcd.getCompressionType())
          .isCompaction(false)
          .includeMVCCReadpoint(true)
          .includesTag(false)
          .shouldDropBehind(false));
  Path path = writer.getPath();
  writer.append(new KeyValue(row, family, qf1, Bytes.toBytes(1)));
  writer.append(new KeyValue(row, family, qf2, Bytes.toBytes(2)));
  writer.append(new KeyValue(row2, family, qf1, Bytes.toBytes(3)));
  writer.append(new KeyValue(row2, family, qf2, Bytes.toBytes(4)));
  writer.close();
  // Verify that compression and encoding settings are respected.
  HFile.Reader reader = HFile.createReader(fs, path, new CacheConfig(conf), true, conf);
  assertEquals(hcd.getCompressionType(), reader.getTrailer().getCompressionCodec());
  assertEquals(hcd.getDataBlockEncoding(), reader.getDataBlockEncoding());
  reader.close();
}
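The two settings the test verifies are declared the same way on real tables. A minimal sketch, assuming an open Admin connection (`admin`) and illustrative table and family names, of requesting GZ compression and DIFF data block encoding at table-creation time:

// Hedged sketch: create a table whose single family uses GZ compression and DIFF encoding.
TableDescriptor td = TableDescriptorBuilder.newBuilder(TableName.valueOf("demo_table"))
    .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("cf"))
        .setCompressionType(Compression.Algorithm.GZ)
        .setDataBlockEncoding(DataBlockEncoding.DIFF)
        .build())
    .build();
admin.createTable(td);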
Use of org.apache.hadoop.hbase.client.ColumnFamilyDescriptor in project hbase by apache.
From the class TestHRegionServerBulkLoad, method setupTable.
/**
 * Creates a table with the given table name and the specified number of column
 * families, if the table does not already exist.
 */
public void setupTable(TableName table, int cfs) throws IOException {
  try {
    LOG.info("Creating table " + table);
    TableDescriptorBuilder tableDescriptorBuilder = TableDescriptorBuilder.newBuilder(table);
    tableDescriptorBuilder.setCoprocessor(MyObserver.class.getName());
    MyObserver.sleepDuration = this.sleepDuration;
    // Add the requested number of column families.
    for (int i = 0; i < cfs; i++) {
      ColumnFamilyDescriptor columnFamilyDescriptor =
          ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(family(i))).build();
      tableDescriptorBuilder.setColumnFamily(columnFamilyDescriptor);
    }
    UTIL.getAdmin().createTable(tableDescriptorBuilder.build());
  } catch (TableExistsException tee) {
    LOG.info("Table " + table + " already exists");
  }
}
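setupTable relies on a family(int) naming helper defined elsewhere in TestHRegionServerBulkLoad; a plausible sketch of that helper plus a call site (the exact name format and table name are assumptions):

// Hypothetical helper: derive a deterministic column family name from an index.
private static String family(int i) {
  return String.format("family_%04d", i);
}

// Usage: create a ten-family table (a no-op if it already exists).
setupTable(TableName.valueOf("bulkload_test"), 10);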