Use of org.apache.hadoop.hbase.io.encoding.DataBlockEncoding in project hbase by apache.
The class ChangeEncodingAction, method perform().
@Override
public void perform() throws IOException {
  getLogger().debug("Performing action: Changing encodings on " + tableName);
  // possible DataBlockEncoding id's
  final int[] possibleIds = { 0, 2, 3, 4, 7 };
  modifyAllTableColumns(tableName, (columnName, columnBuilder) -> {
    short id = (short) possibleIds[random.nextInt(possibleIds.length)];
    DataBlockEncoding encoding = DataBlockEncoding.getEncodingById(id);
    columnBuilder.setDataBlockEncoding(encoding);
    getLogger().debug("Set encoding of column family " + columnName + " to: " + encoding);
  });
}
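For reference, the numeric ids in possibleIds resolve to DataBlockEncoding constants through getEncodingById. Below is a minimal standalone sketch (not part of ChangeEncodingAction; the class name EncodingIdLookup is made up for illustration) that prints the id-to-constant mapping:

import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;

public class EncodingIdLookup {
  public static void main(String[] args) {
    // Same ids as used by ChangeEncodingAction above.
    short[] possibleIds = { 0, 2, 3, 4, 7 };
    for (short id : possibleIds) {
      // getEncodingById maps a persisted short id back to its enum constant,
      // e.g. 0 -> NONE and 4 -> FAST_DIFF.
      DataBlockEncoding encoding = DataBlockEncoding.getEncodingById(id);
      System.out.println(id + " -> " + encoding + " (id=" + encoding.getId() + ")");
    }
  }
}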
Use of org.apache.hadoop.hbase.io.encoding.DataBlockEncoding in project hbase by apache.
The class IntegrationTestLazyCfLoading, method createTable().
private void createTable() throws Exception {
  deleteTable();
  LOG.info("Creating table");
  Configuration conf = util.getConfiguration();
  String encodingKey = String.format(ENCODING_KEY, this.getClass().getSimpleName());
  DataBlockEncoding blockEncoding = DataBlockEncoding.valueOf(conf.get(encodingKey, "FAST_DIFF"));
  TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(TABLE_NAME);
  for (byte[] cf : dataGen.getColumnFamilies()) {
    ColumnFamilyDescriptor familyDescriptor =
      ColumnFamilyDescriptorBuilder.newBuilder(cf).setDataBlockEncoding(blockEncoding).build();
    builder.setColumnFamily(familyDescriptor);
  }
  int serverCount =
    util.getHBaseClusterInterface().getClusterMetrics().getLiveServerMetrics().size();
  byte[][] splits = new RegionSplitter.HexStringSplit().split(serverCount * REGIONS_PER_SERVER);
  util.getAdmin().createTable(builder.build(), splits);
  LOG.info("Created table");
}
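Outside the integration-test harness, the same table setup reduces to a few client calls. Below is a minimal sketch, assuming a hypothetical table name "example_table" and a single family "cf", that applies FAST_DIFF encoding at creation time:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.util.Bytes;

public class CreateEncodedTable {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    try (Connection connection = ConnectionFactory.createConnection(conf);
         Admin admin = connection.getAdmin()) {
      // Column family "cf" (hypothetical) with FAST_DIFF data block encoding.
      ColumnFamilyDescriptor family = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("cf"))
        .setDataBlockEncoding(DataBlockEncoding.FAST_DIFF)
        .build();
      // Table name "example_table" is also hypothetical; no pre-splitting here.
      admin.createTable(TableDescriptorBuilder.newBuilder(TableName.valueOf("example_table"))
        .setColumnFamily(family)
        .build());
    }
  }
}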
Use of org.apache.hadoop.hbase.io.encoding.DataBlockEncoding in project hbase by apache.
The class LruAdaptiveBlockCache, method getEncodingCountsForTest().
public Map<DataBlockEncoding, Integer> getEncodingCountsForTest() {
  Map<DataBlockEncoding, Integer> counts = new EnumMap<>(DataBlockEncoding.class);
  for (LruCachedBlock block : map.values()) {
    DataBlockEncoding encoding = ((HFileBlock) block.getBuffer()).getDataBlockEncoding();
    Integer count = counts.get(encoding);
    counts.put(encoding, (count == null ? 0 : count) + 1);
  }
  return counts;
}
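The null-check-and-put tally above can also be written with Map.merge. Below is a minimal sketch of the same counting idiom, assuming an arbitrary collection of encodings (the HFileBlock cast and block-cache internals are omitted; the class and method names here are made up):

import java.util.Arrays;
import java.util.EnumMap;
import java.util.Map;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;

public class EncodingTally {
  // Counts how often each encoding appears, equivalent to the loop above.
  static Map<DataBlockEncoding, Integer> countEncodings(Iterable<DataBlockEncoding> encodings) {
    Map<DataBlockEncoding, Integer> counts = new EnumMap<>(DataBlockEncoding.class);
    for (DataBlockEncoding encoding : encodings) {
      counts.merge(encoding, 1, Integer::sum);
    }
    return counts;
  }

  public static void main(String[] args) {
    System.out.println(countEncodings(Arrays.asList(
      DataBlockEncoding.NONE, DataBlockEncoding.FAST_DIFF, DataBlockEncoding.FAST_DIFF)));
  }
}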
Use of org.apache.hadoop.hbase.io.encoding.DataBlockEncoding in project hbase by apache.
The class HBaseTestingUtil, method generateColumnDescriptors().
/**
 * Create a set of column descriptors with the combination of compression, encoding, bloom codecs
 * available.
 * @param prefix family names prefix
 * @return the list of column descriptors
 */
public static List<ColumnFamilyDescriptor> generateColumnDescriptors(final String prefix) {
  List<ColumnFamilyDescriptor> columnFamilyDescriptors = new ArrayList<>();
  long familyId = 0;
  for (Compression.Algorithm compressionType : getSupportedCompressionAlgorithms()) {
    for (DataBlockEncoding encodingType : DataBlockEncoding.values()) {
      for (BloomType bloomType : BloomType.values()) {
        String name = String.format("%s-cf-!@#&-%d!@#", prefix, familyId);
        ColumnFamilyDescriptorBuilder columnFamilyDescriptorBuilder =
          ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes(name));
        columnFamilyDescriptorBuilder.setCompressionType(compressionType);
        columnFamilyDescriptorBuilder.setDataBlockEncoding(encodingType);
        columnFamilyDescriptorBuilder.setBloomFilterType(bloomType);
        columnFamilyDescriptors.add(columnFamilyDescriptorBuilder.build());
        familyId++;
      }
    }
  }
  return columnFamilyDescriptors;
}
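A consumer of generateColumnDescriptors might attach every generated family to one table descriptor. Below is a minimal sketch under that assumption; the class name AllEncodingsTableDescriptor, the prefix "test", and the table name "all_encodings" are hypothetical:

import java.util.List;
import org.apache.hadoop.hbase.HBaseTestingUtil;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;

public class AllEncodingsTableDescriptor {
  public static void main(String[] args) {
    // One family per (compression, encoding, bloom) combination, as generated above.
    List<ColumnFamilyDescriptor> families = HBaseTestingUtil.generateColumnDescriptors("test");
    TableDescriptorBuilder builder =
      TableDescriptorBuilder.newBuilder(TableName.valueOf("all_encodings"));
    for (ColumnFamilyDescriptor family : families) {
      builder.setColumnFamily(family);
    }
    TableDescriptor tableDescriptor = builder.build();
    System.out.println("Families generated: " + tableDescriptor.getColumnFamilyCount());
  }
}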