use of org.apache.hadoop.hdfs.protocol.SnapshotDiffReport in project hadoop by apache.
the class TestRenameWithSnapshots method testRenameDirectoryInSnapshot.
@Test(timeout = 60000)
public void testRenameDirectoryInSnapshot() throws Exception {
  final Path sub2 = new Path(sub1, "sub2");
  final Path sub3 = new Path(sub1, "sub3");
  final Path sub2file1 = new Path(sub2, "sub2file1");
  final String sub1snap1 = "sub1snap1";
  hdfs.mkdirs(sub1);
  hdfs.mkdirs(sub2);
  DFSTestUtil.createFile(hdfs, sub2file1, BLOCKSIZE, REPL, SEED);
  SnapshotTestHelper.createSnapshot(hdfs, sub1, sub1snap1);
  // First rename the sub-directory.
  hdfs.rename(sub2, sub3);
  // Query the diff report and make sure it looks as expected.
  SnapshotDiffReport diffReport = hdfs.getSnapshotDiffReport(sub1, sub1snap1, "");
  LOG.info("DiffList is \n\"" + diffReport.toString() + "\"");
  List<DiffReportEntry> entries = diffReport.getDiffList();
  assertEquals(2, entries.size());
  assertTrue(existsInDiffReport(entries, DiffType.MODIFY, "", null));
  assertTrue(existsInDiffReport(entries, DiffType.RENAME, sub2.getName(), sub3.getName()));
}
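The assertions above only check that specific entries exist. For code that needs to walk a whole diff report, the sketch below shows one way to iterate the entry list; it is a minimal illustration, not Hadoop code, and the DiffReportWalker class and printDiff method are invented names.

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport;
import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport.DiffReportEntry;
import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport.DiffType;

public class DiffReportWalker {
  // Fetches and prints a diff report between two snapshots of snapshotRoot.
  // Passing an empty string for toSnapshot compares against the current state,
  // as in the test above.
  public static void printDiff(DistributedFileSystem fs, Path snapshotRoot,
      String fromSnapshot, String toSnapshot) throws Exception {
    SnapshotDiffReport report =
        fs.getSnapshotDiffReport(snapshotRoot, fromSnapshot, toSnapshot);
    for (DiffReportEntry entry : report.getDiffList()) {
      DiffType type = entry.getType(); // CREATE, MODIFY, DELETE or RENAME
      System.out.println(type + ": " + entry);
    }
  }
}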
use of org.apache.hadoop.hdfs.protocol.SnapshotDiffReport in project hadoop by apache.
the class PBHelperClient method convert.
public static SnapshotDiffReport convert(SnapshotDiffReportProto reportProto) {
  if (reportProto == null) {
    return null;
  }
  String snapshotDir = reportProto.getSnapshotRoot();
  String fromSnapshot = reportProto.getFromSnapshot();
  String toSnapshot = reportProto.getToSnapshot();
  List<SnapshotDiffReportEntryProto> list = reportProto.getDiffReportEntriesList();
  List<DiffReportEntry> entries = new ArrayList<>();
  for (SnapshotDiffReportEntryProto entryProto : list) {
    DiffReportEntry entry = convert(entryProto);
    if (entry != null)
      entries.add(entry);
  }
  return new SnapshotDiffReport(snapshotDir, fromSnapshot, toSnapshot, entries);
}
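The loop above tolerates null results from the per-entry convert overload. For comparison, the same conversion can be expressed with Java streams; the following is a hedged sketch of an equivalent helper (convertViaStreams is not an actual PBHelperClient method), written as if it sat next to the original and reused the existing per-entry convert(SnapshotDiffReportEntryProto) overload.

// Extra imports this variant would need in PBHelperClient:
//   import java.util.Objects;
//   import java.util.stream.Collectors;
public static SnapshotDiffReport convertViaStreams(
    SnapshotDiffReportProto reportProto) {
  if (reportProto == null) {
    return null;
  }
  List<DiffReportEntry> entries = reportProto.getDiffReportEntriesList().stream()
      // Reuse the per-entry convert(SnapshotDiffReportEntryProto) overload.
      .map(entryProto -> convert(entryProto))
      // The original loop silently skips null entries; preserve that behavior.
      .filter(Objects::nonNull)
      .collect(Collectors.toList());
  return new SnapshotDiffReport(reportProto.getSnapshotRoot(),
      reportProto.getFromSnapshot(), reportProto.getToSnapshot(), entries);
}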
use of org.apache.hadoop.hdfs.protocol.SnapshotDiffReport in project hadoop by apache.
the class TestDistCpSyncReverseBase method testSync5.
/**
 * Test a case with different delete and rename sequences.
 */
@Test
public void testSync5() throws Exception {
  if (isSrcNotSameAsTgt) {
    initData5(source);
  }
  initData5(target);
  enableAndCreateFirstSnapshot();
  // make changes under target
  int numDeletedAndModified = changeData5(target);
  createSecondSnapshotAtTarget();
  SnapshotDiffReport report = dfs.getSnapshotDiffReport(target, "s2", "s1");
  System.out.println(report);
  testAndVerify(numDeletedAndModified);
}
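This test depends on helper methods (initData5, changeData5, enableAndCreateFirstSnapshot, createSecondSnapshotAtTarget) that are not shown in this listing. The sketch below approximates the snapshot bookkeeping they are assumed to perform around the reversed diff query; it is an illustration of the pattern, not the test's actual helpers.

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport;

public class ReverseDiffSketch {
  // Takes a baseline snapshot, expects the caller to mutate the directory in
  // between, then takes a second snapshot and queries the diff in reverse.
  public static SnapshotDiffReport reverseDiff(DistributedFileSystem dfs,
      Path target) throws Exception {
    dfs.allowSnapshot(target);          // make the directory snapshottable
    dfs.createSnapshot(target, "s1");   // baseline snapshot
    // ... mutate files under target here (deletes, renames, appends) ...
    dfs.createSnapshot(target, "s2");   // snapshot after the changes
    // Asking for ("s2", "s1") yields the steps needed to roll the current
    // state back to the baseline, which is the direction a reverse sync uses.
    return dfs.getSnapshotDiffReport(target, "s2", "s1");
  }
}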
use of org.apache.hadoop.hdfs.protocol.SnapshotDiffReport in project hadoop by apache.
the class TestDistCpSyncReverseBase method testSync4.
/**
 * Test a case where multiple level dirs are renamed.
 */
@Test
public void testSync4() throws Exception {
  if (isSrcNotSameAsTgt) {
    initData4(source);
  }
  initData4(target);
  enableAndCreateFirstSnapshot();
  final FsShell shell = new FsShell(conf);
  lsr("Before change target: ", shell, target);
  // make changes under target
  int numDeletedAndModified = changeData4(target);
  createSecondSnapshotAtTarget();
  SnapshotDiffReport report = dfs.getSnapshotDiffReport(target, "s2", "s1");
  System.out.println(report);
  testAndVerify(numDeletedAndModified);
}
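For a multi-level rename scenario like this one, the raw System.out dump of the report can be hard to scan. A small hedged sketch of summarizing the entries per DiffType follows; DiffSummary is an invented helper, not part of Hadoop.

import java.util.EnumMap;
import java.util.Map;

import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport;
import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport.DiffReportEntry;
import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport.DiffType;

public class DiffSummary {
  // Counts report entries per type (CREATE, MODIFY, DELETE, RENAME).
  public static Map<DiffType, Integer> countByType(SnapshotDiffReport report) {
    Map<DiffType, Integer> counts = new EnumMap<>(DiffType.class);
    for (DiffReportEntry entry : report.getDiffList()) {
      counts.merge(entry.getType(), 1, Integer::sum);
    }
    return counts;
  }
}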
use of org.apache.hadoop.hdfs.protocol.SnapshotDiffReport in project hadoop by apache.
the class TestDistCpSync method testSync.
/**
 * Test the basic functionality.
 */
@Test
public void testSync() throws Exception {
  initData(source);
  initData(target);
  enableAndCreateFirstSnapshot();
  // make changes under source
  int numCreatedModified = changeData(source);
  dfs.createSnapshot(source, "s2");
  // before sync, make some further changes on source. this should not affect
  // the later distcp since we're copying (s2-s1) to target
  final Path toDelete = new Path(source, "foo/d1/foo/f1");
  dfs.delete(toDelete, true);
  final Path newdir = new Path(source, "foo/d1/foo/newdir");
  dfs.mkdirs(newdir);
  SnapshotDiffReport report = dfs.getSnapshotDiffReport(source, "s1", "s2");
  System.out.println(report);
  DistCpSync distCpSync = new DistCpSync(options, conf);
  // do the sync
  Assert.assertTrue(distCpSync.sync());
  // make sure the source path has been updated to the snapshot path
  final Path spath = new Path(source,
      HdfsConstants.DOT_SNAPSHOT_DIR + Path.SEPARATOR + "s2");
  Assert.assertEquals(spath, options.getSourcePaths().get(0));
  // build copy listing
  final Path listingPath = new Path("/tmp/META/fileList.seq");
  CopyListing listing = new SimpleCopyListing(conf, new Credentials(), distCpSync);
  listing.buildListing(listingPath, options);
  Map<Text, CopyListingFileStatus> copyListing = getListing(listingPath);
  CopyMapper copyMapper = new CopyMapper();
  StubContext stubContext = new StubContext(conf, null, 0);
  Mapper<Text, CopyListingFileStatus, Text, Text>.Context context =
      stubContext.getContext();
  // Enable append
  context.getConfiguration().setBoolean(
      DistCpOptionSwitch.APPEND.getConfigLabel(), true);
  copyMapper.setup(context);
  for (Map.Entry<Text, CopyListingFileStatus> entry : copyListing.entrySet()) {
    copyMapper.map(entry.getKey(), entry.getValue(), context);
  }
  // verify that we only list modified and created files/directories
  Assert.assertEquals(numCreatedModified, copyListing.size());
  // verify that we only copied new appended data of f2 and the new file f1
  Assert.assertEquals(BLOCK_SIZE * 3,
      stubContext.getReporter().getCounter(CopyMapper.Counter.BYTESCOPIED).getValue());
  // verify the source and target now have the same structure
  verifyCopy(dfs.getFileStatus(spath), dfs.getFileStatus(target), false);
}
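The getListing(Path) helper used above is not part of this listing. The sketch below shows a plausible implementation that reads the SequenceFile written by SimpleCopyListing into a map; it is an assumption about the helper's shape, not the test's actual code, and the CopyListingReader class name is invented.

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.tools.CopyListingFileStatus;

public class CopyListingReader {
  // Reads a DistCp copy listing SequenceFile into a map keyed by the listing's
  // Text path entries.
  public static Map<Text, CopyListingFileStatus> readListing(Configuration conf,
      Path listingPath) throws IOException {
    Map<Text, CopyListingFileStatus> listing = new HashMap<>();
    try (SequenceFile.Reader reader =
        new SequenceFile.Reader(conf, SequenceFile.Reader.file(listingPath))) {
      Text key = new Text();
      CopyListingFileStatus value = new CopyListingFileStatus();
      while (reader.next(key, value)) {
        listing.put(key, value);
        // Allocate fresh instances so entries already in the map are not
        // overwritten by the next call to reader.next().
        key = new Text();
        value = new CopyListingFileStatus();
      }
    }
    return listing;
  }
}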