Use of org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo in project hbase by apache.
The class ExportSnapshot, method getSnapshotFiles.
// ==========================================================================
// Input Format
// ==========================================================================
/**
* Extract the list of files (HFiles/WALs) to copy using Map-Reduce.
* @return list of files referenced by the snapshot (pair of path and size)
*/
private static List<Pair<SnapshotFileInfo, Long>> getSnapshotFiles(final Configuration conf,
    final FileSystem fs, final Path snapshotDir) throws IOException {
  SnapshotDescription snapshotDesc = SnapshotDescriptionUtils.readSnapshotInfo(fs, snapshotDir);
  final List<Pair<SnapshotFileInfo, Long>> files = new ArrayList<>();
  final TableName table = TableName.valueOf(snapshotDesc.getTable());

  // Get snapshot files
  LOG.info("Loading Snapshot '" + snapshotDesc.getName() + "' hfile list");
  SnapshotReferenceUtil.visitReferencedFiles(conf, fs, snapshotDir, snapshotDesc,
    new SnapshotReferenceUtil.SnapshotVisitor() {
      @Override
      public void storeFile(final HRegionInfo regionInfo, final String family,
          final SnapshotRegionManifest.StoreFile storeFile) throws IOException {
        // for storeFile.hasReference() case, copied as part of the manifest
        if (!storeFile.hasReference()) {
          String region = regionInfo.getEncodedName();
          String hfile = storeFile.getName();
          Path path = HFileLink.createPath(table, region, family, hfile);

          SnapshotFileInfo fileInfo = SnapshotFileInfo.newBuilder()
            .setType(SnapshotFileInfo.Type.HFILE)
            .setHfile(path.toString())
            .build();

          long size;
          if (storeFile.hasFileSize()) {
            size = storeFile.getFileSize();
          } else {
            size = HFileLink.buildFromHFileLinkPattern(conf, path).getFileStatus(fs).getLen();
          }
          files.add(new Pair<>(fileInfo, size));
        }
      }
    });

  return files;
}
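The list returned by getSnapshotFiles() pairs each referenced file with its size, so a caller can reason about the total amount of data before launching the copy. The following standalone sketch is not part of ExportSnapshot; it is a hypothetical helper (SnapshotFileReport is an invented name) that assumes only the SnapshotFileInfo and Pair types used above, summing the reported sizes while printing each referenced path.

import java.util.List;

import org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo;
import org.apache.hadoop.hbase.util.Pair;

public final class SnapshotFileReport {
  private SnapshotFileReport() {
  }

  /** Sums the sizes reported by getSnapshotFiles() and prints each referenced file. */
  public static long report(final List<Pair<SnapshotFileInfo, Long>> files) {
    long totalBytes = 0;
    for (Pair<SnapshotFileInfo, Long> entry : files) {
      SnapshotFileInfo fileInfo = entry.getFirst();
      long size = entry.getSecond();
      totalBytes += size;
      // For HFILE entries the path is stored in the "hfile" field of the protobuf message.
      System.out.println(fileInfo.getType() + " " + fileInfo.getHfile() + " (" + size + " bytes)");
    }
    return totalBytes;
  }
}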
Use of org.apache.hadoop.hbase.shaded.protobuf.generated.SnapshotProtos.SnapshotFileInfo in project hbase by apache.
The class TestExportSnapshotHelpers, method testBalanceSplit.
/**
* Verify the result of the getBalancedSplits() method.
* The result is a set of groups of files, used as the input lists for the "export" mappers.
* All the groups should have a similar amount of data.
*
* The input is a list of (file path, length) pairs.
* The getBalancedSplits() function sorts it by length
* and assigns a file to each group, going back and forth through the groups.
*/
@Test
public void testBalanceSplit() throws Exception {
  // Create a list of files
  List<Pair<SnapshotFileInfo, Long>> files = new ArrayList<>(21);
  for (long i = 0; i <= 20; i++) {
    SnapshotFileInfo fileInfo = SnapshotFileInfo.newBuilder()
      .setType(SnapshotFileInfo.Type.HFILE)
      .setHfile("file-" + i)
      .build();
    files.add(new Pair<>(fileInfo, i));
  }

  // Create 5 groups (total size 210)
  //   group 0: 20, 11, 10, 1, 0 (total size: 42)
  //   group 1: 19, 12,  9, 2    (total size: 42)
  //   group 2: 18, 13,  8, 3    (total size: 42)
  //   group 3: 17, 14,  7, 4    (total size: 42)
  //   group 4: 16, 15,  6, 5    (total size: 42)
  List<List<Pair<SnapshotFileInfo, Long>>> splits = ExportSnapshot.getBalancedSplits(files, 5);
  assertEquals(5, splits.size());

  String[] split0 = new String[] { "file-20", "file-11", "file-10", "file-1", "file-0" };
  verifyBalanceSplit(splits.get(0), split0, 42);
  String[] split1 = new String[] { "file-19", "file-12", "file-9", "file-2" };
  verifyBalanceSplit(splits.get(1), split1, 42);
  String[] split2 = new String[] { "file-18", "file-13", "file-8", "file-3" };
  verifyBalanceSplit(splits.get(2), split2, 42);
  String[] split3 = new String[] { "file-17", "file-14", "file-7", "file-4" };
  verifyBalanceSplit(splits.get(3), split3, 42);
  String[] split4 = new String[] { "file-16", "file-15", "file-6", "file-5" };
  verifyBalanceSplit(splits.get(4), split4, 42);
}
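The back-and-forth assignment described in the javadoc above can be illustrated on its own. The sketch below is not the ExportSnapshot implementation; it is a simplified, hypothetical version (BalanceSketch is an invented name) that works on plain long sizes instead of Pair<SnapshotFileInfo, Long>: sizes are handed out from largest to smallest, sweeping across the groups left-to-right and then right-to-left, with the edge groups receiving two items in a row at each turn. Running main() with sizes 0..20 and 5 groups reproduces the per-group totals of 42 expected by the test above.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public final class BalanceSketch {
  private BalanceSketch() {
  }

  /** Distributes sizes across ngroups, sweeping back and forth so group totals stay balanced. */
  public static List<List<Long>> balance(final long[] sizes, final int ngroups) {
    long[] sorted = sizes.clone();
    Arrays.sort(sorted); // ascending; we walk it from the largest element down

    List<List<Long>> groups = new ArrayList<>(ngroups);
    for (int g = 0; g < ngroups; g++) {
      groups.add(new ArrayList<>());
    }

    int group = 0;
    int dir = 1; // +1 while sweeping left-to-right, -1 while sweeping right-to-left
    for (int i = sorted.length - 1; i >= 0; i--) {
      groups.get(group).add(sorted[i]);
      if ((group == ngroups - 1 && dir > 0) || (group == 0 && dir < 0)) {
        dir = -dir; // reverse direction at the edges; the edge group receives two in a row
      } else {
        group += dir;
      }
    }
    return groups;
  }

  public static void main(String[] args) {
    long[] sizes = new long[21];
    for (int i = 0; i <= 20; i++) {
      sizes[i] = i;
    }
    // With 5 groups, every total comes out to 42, matching the expectations in the test above.
    for (List<Long> group : balance(sizes, 5)) {
      System.out.println(group + " -> " + group.stream().mapToLong(Long::longValue).sum());
    }
  }
}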