Use of org.apache.hadoop.hbase.backup.BackupInfo in project hbase by apache.
The class BackupUtils, method loadBackupInfo.
public static BackupInfo loadBackupInfo(Path backupRootPath, String backupId, FileSystem fs)
    throws IOException {
  Path backupPath = new Path(backupRootPath, backupId);
  RemoteIterator<LocatedFileStatus> it = fs.listFiles(backupPath, true);
  while (it.hasNext()) {
    LocatedFileStatus lfs = it.next();
    if (lfs.getPath().getName().equals(BackupManifest.MANIFEST_FILE_NAME)) {
      // Load BackupManifest
      BackupManifest manifest = new BackupManifest(fs, lfs.getPath().getParent());
      BackupInfo info = manifest.toBackupInfo();
      return info;
    }
  }
  return null;
}
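A minimal usage sketch, assuming a Hadoop Configuration for the cluster; the backup root path and backup id below are placeholders. Since loadBackupInfo returns null when no manifest file is found under the given id, the result must be checked.

Configuration conf = HBaseConfiguration.create();
FileSystem fs = FileSystem.get(conf);
Path backupRoot = new Path("hdfs:///backup");     // hypothetical backup root
String backupId = "backup_1528059449110";         // hypothetical backup id
BackupInfo info = BackupUtils.loadBackupInfo(backupRoot, backupId, fs);
if (info != null) {
  System.out.println(info.getBackupId() + " " + info.getState());
}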
Use of org.apache.hadoop.hbase.backup.BackupInfo in project hbase by apache.
The class BackupSystemTable, method getBackupHistory.
/**
 * Get backup session history (in descending order by start time).
 * @param onlyCompleted true, if only successfully completed sessions should be returned
 * @return list of BackupInfo records, most recent first
 * @throws IOException exception
 */
public ArrayList<BackupInfo> getBackupHistory(boolean onlyCompleted) throws IOException {
  if (LOG.isTraceEnabled()) {
    LOG.trace("get backup history from backup system table");
  }
  ArrayList<BackupInfo> list;
  BackupState state = onlyCompleted ? BackupState.COMPLETE : BackupState.ANY;
  list = getBackupInfos(state);
  return BackupUtils.sortHistoryListDesc(list);
}
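A short caller sketch, assuming BackupSystemTable is constructed over a cluster Connection and is Closeable (both assumptions, not shown in the snippet above):

try (Connection conn = ConnectionFactory.createConnection(conf);
     BackupSystemTable table = new BackupSystemTable(conn)) {   // constructor signature assumed
  ArrayList<BackupInfo> completed = table.getBackupHistory(true);   // COMPLETE sessions only
  ArrayList<BackupInfo> all = table.getBackupHistory(false);        // sessions in any state
}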
Use of org.apache.hadoop.hbase.backup.BackupInfo in project hbase by apache.
The class BackupUtils, method getHistory.
private static List<BackupInfo> getHistory(Configuration conf, Path backupRootPath)
    throws IOException {
  // Get all (n) history from backup root destination
  FileSystem fs = FileSystem.get(conf);
  RemoteIterator<LocatedFileStatus> it = fs.listLocatedStatus(backupRootPath);
  List<BackupInfo> infos = new ArrayList<BackupInfo>();
  while (it.hasNext()) {
    LocatedFileStatus lfs = it.next();
    if (!lfs.isDirectory()) {
      continue;
    }
    String backupId = lfs.getPath().getName();
    try {
      BackupInfo info = loadBackupInfo(backupRootPath, backupId, fs);
      // Skip directories that have no manifest; loadBackupInfo returns null for them
      if (info != null) {
        infos.add(info);
      }
    } catch (IOException e) {
      LOG.error("Can not load backup info from: " + lfs.getPath(), e);
    }
  }
  // Sort by the timestamp embedded in the backup id, newest first
  Collections.sort(infos, new Comparator<BackupInfo>() {

    @Override
    public int compare(BackupInfo o1, BackupInfo o2) {
      long ts1 = getTimestamp(o1.getBackupId());
      long ts2 = getTimestamp(o2.getBackupId());
      if (ts1 == ts2) {
        return 0;
      }
      return ts1 < ts2 ? 1 : -1;
    }

    private long getTimestamp(String backupId) {
      // Backup ids have the form <prefix>_<timestamp>
      String[] split = backupId.split("_");
      return Long.parseLong(split[1]);
    }
  });
  return infos;
}
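The comparator assumes backup ids follow a "<prefix>_<timestamp>" shape, which is what the split("_") call above relies on. An illustrative parse (the id value is hypothetical):

String backupId = "backup_1528059449110";          // hypothetical id
long ts = Long.parseLong(backupId.split("_")[1]);  // 1528059449110L, used for newest-first ordering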
Use of org.apache.hadoop.hbase.backup.BackupInfo in project hbase by apache.
The class BackupUtils, method sortHistoryListDesc.
/**
 * Sort history list by start time in descending order.
 * @param historyList history list
 * @return sorted list of BackupInfo records, most recent first
 */
public static ArrayList<BackupInfo> sortHistoryListDesc(ArrayList<BackupInfo> historyList) {
  ArrayList<BackupInfo> list = new ArrayList<BackupInfo>();
  TreeMap<String, BackupInfo> map = new TreeMap<String, BackupInfo>();
  for (BackupInfo h : historyList) {
    map.put(Long.toString(h.getStartTs()), h);
  }
  Iterator<String> i = map.descendingKeySet().iterator();
  while (i.hasNext()) {
    list.add(map.get(i.next()));
  }
  return list;
}
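A small sketch of the ordering trick the method relies on: start timestamps are epoch milliseconds with the same digit count, so their String forms sort in the same order as the numbers, and descendingKeySet() walks them newest first (the timestamp values below are hypothetical):

TreeMap<String, String> map = new TreeMap<>();
map.put(Long.toString(1528000000000L), "older");
map.put(Long.toString(1528059449110L), "newer");
System.out.println(map.descendingKeySet());        // [1528059449110, 1528000000000]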
Use of org.apache.hadoop.hbase.backup.BackupInfo in project hbase by apache.
The class BackupUtils, method getHistory.
public static List<BackupInfo> getHistory(Configuration conf, int n, Path backupRootPath,
    BackupInfo.Filter... filters) throws IOException {
  List<BackupInfo> infos = getHistory(conf, backupRootPath);
  List<BackupInfo> ret = new ArrayList<BackupInfo>();
  for (BackupInfo info : infos) {
    if (ret.size() == n) {
      break;
    }
    boolean passed = true;
    for (int i = 0; i < filters.length; i++) {
      if (!filters[i].apply(info)) {
        passed = false;
        break;
      }
    }
    if (passed) {
      ret.add(info);
    }
  }
  return ret;
}
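A minimal sketch of calling the filtered overload, assuming BackupInfo.Filter is a single-method interface usable as a lambda and that BackupState is the nested state enum; the root path and limit below are placeholders:

Configuration conf = HBaseConfiguration.create();
Path backupRoot = new Path("hdfs:///backup");      // hypothetical backup root
BackupInfo.Filter completedOnly =
    info -> info.getState() == BackupInfo.BackupState.COMPLETE;
List<BackupInfo> recent = BackupUtils.getHistory(conf, 5, backupRoot, completedOnly);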