Use of org.apache.hadoop.hbase.util.Triple in the Apache HBase project.
In the class CatalogJanitor, the method getMergedRegionsAndSplitParents:
/**
 * Scans hbase:meta and returns the number of rows scanned together with a map
 * of merged regions and an ordered map of split parents. If the given table
 * name is null, merged regions and split parents of all tables are returned;
 * otherwise only those of the specified table.
 * @param tableName table to restrict the scan to; null means all tables
 * @return triple of scanned row count, map of merged regions, and map of
 *         split parent regioninfos
 * @throws IOException if the hbase:meta scan fails
 */
Triple<Integer, Map<HRegionInfo, Result>, Map<HRegionInfo, Result>> getMergedRegionsAndSplitParents(final TableName tableName) throws IOException {
final boolean isTableSpecified = (tableName != null);
// TODO: Only works with single hbase:meta region currently. Fix.
final AtomicInteger rowCount = new AtomicInteger(0);
// Split parents found here are candidates for cleanup. The comparator
// orders a split parent ahead of its daughters.
final Map<HRegionInfo, Result> splitParents = new TreeMap<>(new SplitParentFirstComparator());
final Map<HRegionInfo, Result> mergedRegions = new TreeMap<>();
// Visitor that counts hbase:meta rows and collects split parents and
// merged regions as the scan proceeds.
MetaTableAccessor.Visitor visitor = new MetaTableAccessor.Visitor() {
@Override
public boolean visit(Result r) throws IOException {
if (r == null || r.isEmpty()) {
return true;
}
rowCount.incrementAndGet();
HRegionInfo regionInfo = MetaTableAccessor.getHRegionInfo(r);
if (regionInfo == null) {
// Row has no region info; keep scanning.
return true;
}
if (isTableSpecified && regionInfo.getTable().compareTo(tableName) > 0) {
// Scan has moved past the requested table; stop.
return false;
}
if (regionInfo.isSplitParent()) {
splitParents.put(regionInfo, r);
}
if (r.getValue(HConstants.CATALOG_FAMILY, HConstants.MERGEA_QUALIFIER) != null) {
mergedRegions.put(regionInfo, r);
}
// true means "keep scanning"
return true;
}
};
// Full scan of the hbase:meta catalog table using the custom visitor,
// starting at the given table's rows.
MetaTableAccessor.scanMetaForTableRegions(this.connection, visitor, tableName);
return new Triple<>(rowCount.get(), mergedRegions, splitParents);
}
Use of org.apache.hadoop.hbase.util.Triple in the Apache HBase project.
In the class TestFavoredNodeAssignmentHelper, the method secondaryAndTertiaryRSPlacementHelper:
/**
 * Builds a favored-node placement fixture: creates {@code regionCount}
 * regions, round-robins their primary placement over the servers described
 * by {@code rackToServerCount}, and returns the pieces a test needs.
 * @param rackToServerCount rack name to number of servers on that rack
 * @return triple of the primary region-to-server map, the initialized
 *         assignment helper, and the created regions
 */
private Triple<Map<HRegionInfo, ServerName>, FavoredNodeAssignmentHelper, List<HRegionInfo>> secondaryAndTertiaryRSPlacementHelper(int regionCount, Map<String, Integer> rackToServerCount) {
// Stand up the helper over the servers implied by the rack layout.
List<ServerName> servers = getServersFromRack(rackToServerCount);
FavoredNodeAssignmentHelper helper = new FavoredNodeAssignmentHelper(servers, rackManager);
helper.initialize();
Map<HRegionInfo, ServerName> primaryRSMap = new HashMap<HRegionInfo, ServerName>();
Map<ServerName, List<HRegionInfo>> assignmentMap = new HashMap<ServerName, List<HRegionInfo>>();
// Create regionCount regions with adjacent key ranges [i, i+1).
List<HRegionInfo> regions = new ArrayList<>(regionCount);
for (int idx = 0; idx < regionCount; idx++) {
TableName table = TableName.valueOf(name.getMethodName());
regions.add(new HRegionInfo(table, Bytes.toBytes(idx), Bytes.toBytes(idx + 1)));
}
// Round-robin the primary assignment of every region across the servers.
helper.placePrimaryRSAsRoundRobin(assignmentMap, primaryRSMap, regions);
return new Triple<>(primaryRSMap, helper, regions);
}
Use of org.apache.hadoop.hbase.util.Triple in the Apache HBase project.
In the class TestInterfaceAudienceAnnotations, the method findProtoInParamType:
/**
 * Inspects the methods of {@code clazz} and, for each method whose signature
 * takes a protobuf-generated parameter type, adds a (class, method, parameter
 * type) triple to {@code protosParamType}.
 * @param clazz class whose methods are inspected
 * @param protosParamType output list; one independent entry is appended per
 *        method that exposes a protobuf parameter (first matching parameter only)
 */
private void findProtoInParamType(Class<?> clazz, List<Triple<Class<?>, Method, Class<?>>> protosParamType) {
  Method[] methods = clazz.getMethods();
  for (Method method : methods) {
    // Modifier.isPublic tests just the PUBLIC bit; comparing the whole
    // modifier mask with == Modifier.PUBLIC would wrongly reject methods
    // that are e.g. public static or public final.
    if (clazz.isInterface() || Modifier.isPublic(method.getModifiers())) {
      if (!isInterfacePrivateMethod(method)) {
        Class<?>[] parameters = method.getParameterTypes();
        for (Class<?> param : parameters) {
          if (param.getName().contains(HBASE_PROTOBUF)) {
            // Allocate a fresh Triple per match. Reusing one instance across
            // iterations (as before) made every list entry alias the same
            // object, which ended up holding only the last match.
            Triple<Class<?>, Method, Class<?>> paramType = new Triple<>();
            paramType.setFirst(clazz);
            paramType.setSecond(method);
            paramType.setThird(param);
            protosParamType.add(paramType);
            break;
          }
        }
      }
    }
  }
}
Aggregations