Use of org.apache.hadoop.fs.FsShell in project hadoop by apache: class TestHadoopArchives, method testPathWithSpaces.
@Test
public void testPathWithSpaces() throws Exception {
  // create files/directories with spaces
  createFile(inputPath, fs, "c c");
  final Path sub1 = new Path(inputPath, "sub 1");
  fs.mkdirs(sub1);
  createFile(sub1, fs, "file x y z");
  createFile(sub1, fs, "file");
  createFile(sub1, fs, "x");
  createFile(sub1, fs, "y");
  createFile(sub1, fs, "z");
  final Path sub2 = new Path(inputPath, "sub 1 with suffix");
  fs.mkdirs(sub2);
  createFile(sub2, fs, "z");
  final FsShell shell = new FsShell(conf);
  final String inputPathStr = inputPath.toUri().getPath();
  final List<String> originalPaths = lsr(shell, inputPathStr);
  // make the archive:
  final String fullHarPathStr = makeArchive();
  // compare results:
  final List<String> harPaths = lsr(shell, fullHarPathStr);
  Assert.assertEquals(originalPaths, harPaths);
}
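The lsr helper the tests call is not shown on this page. Below is a minimal sketch of how such a helper could drive FsShell and capture a recursive listing; the method name matches the call above, but the output capture, the "-ls -R" arguments, and the line parsing are assumptions, not the actual Hadoop test code.

import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.fs.FsShell;
import org.junit.Assert;

// Hypothetical helper: run a recursive listing through FsShell and return the
// path column of every listed entry.
static List<String> lsr(FsShell shell, String dir) throws Exception {
  final ByteArrayOutputStream bytes = new ByteArrayOutputStream();
  final PrintStream capture = new PrintStream(bytes, true, "UTF-8");
  final PrintStream oldOut = System.out;
  final PrintStream oldErr = System.err;
  System.setOut(capture);
  System.setErr(capture);
  try {
    // FsShell implements Tool; run() returns 0 on success.
    Assert.assertEquals(0, shell.run(new String[] { "-ls", "-R", dir }));
  } finally {
    System.setOut(oldOut);
    System.setErr(oldErr);
    capture.close();
  }
  final List<String> paths = new ArrayList<>();
  for (String line : bytes.toString("UTF-8").split("\n")) {
    line = line.trim();
    if (line.isEmpty() || line.startsWith("Found ")) {
      continue; // skip blank lines and the "Found N items" header
    }
    final String[] fields = line.split("\\s+");
    paths.add(fields[fields.length - 1]); // the path is the last field
  }
  return paths;
}

Presumably the real helper also normalizes each entry relative to the listed root so that the original tree and the har:// tree compare equal in Assert.assertEquals; that normalization step is omitted from this sketch.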
Use of org.apache.hadoop.fs.FsShell in project hadoop by apache: class TestHadoopArchives, method testRelativePath.
@Test
public void testRelativePath() throws Exception {
  final Path sub1 = new Path(inputPath, "dir1");
  fs.mkdirs(sub1);
  createFile(inputPath, fs, sub1.getName(), "a");
  final FsShell shell = new FsShell(conf);
  final List<String> originalPaths = lsr(shell, "input");
  System.out.println("originalPaths: " + originalPaths);
  // make the archive:
  final String fullHarPathStr = makeArchive();
  // compare results:
  final List<String> harPaths = lsr(shell, fullHarPathStr);
  Assert.assertEquals(originalPaths, harPaths);
}
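createFile is another helper the page does not show. A plausible sketch is below, assuming the trailing varargs are nested path components with the file name last and that arbitrary file content is acceptable; the real helper may differ.

import java.io.IOException;

import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Hypothetical helper: create a small file under root; the varargs are nested
// path components, the last one being the file name.
static Path createFile(Path root, FileSystem fs, String... dirsAndFile)
    throws IOException {
  Path p = root;
  for (String name : dirsAndFile) {
    p = new Path(p, name);
  }
  // FileSystem.create() creates any missing parent directories.
  try (FSDataOutputStream out = fs.create(p)) {
    out.writeBytes(dirsAndFile[dirsAndFile.length - 1]); // arbitrary content
  }
  return p;
}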
Use of org.apache.hadoop.fs.FsShell in project hadoop by apache: class TestHadoopArchives, method testGlobFiles.
@Test
public void testGlobFiles() throws Exception {
  final Path sub1 = new Path(inputPath, "dir1");
  final Path sub2 = new Path(inputPath, "dir2");
  fs.mkdirs(sub1);
  String fileName = "a";
  createFile(inputPath, fs, sub1.getName(), fileName);
  createFile(inputPath, fs, sub2.getName(), fileName);
  // not part of result:
  createFile(inputPath, fs, sub1.getName(), "b");
  final String glob = "dir{1,2}/a";
  final FsShell shell = new FsShell(conf);
  final List<String> originalPaths =
      lsr(shell, inputPath.toString(), inputPath + "/" + glob);
  System.out.println("originalPaths: " + originalPaths);
  // make the archive:
  final String fullHarPathStr = makeArchive(inputPath, glob);
  // compare results:
  final List<String> harPaths =
      lsr(shell, fullHarPathStr, fullHarPathStr + "/" + glob);
  Assert.assertEquals(originalPaths, harPaths);
}
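makeArchive drives the HadoopArchives tool to build the .har file that FsShell then lists. A sketch of what such a helper could look like is below; the archive name test.har, the explicit conf/fs/archivePath parameters, and the exact har:// URI construction are assumptions of this sketch rather than the test's actual code.

import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.tools.HadoopArchives;
import org.apache.hadoop.util.ToolRunner;
import org.junit.Assert;

// Hypothetical helper: archive everything under parentPath that matches
// relGlob into <archivePath>/test.har and return a har:// path for listing it.
static String makeArchive(Configuration conf, FileSystem fs, Path parentPath,
    String relGlob, Path archivePath) throws Exception {
  final String glob = relGlob == null ? "*" : relGlob;
  final String harName = "test.har"; // assumed archive name
  final String[] args = { "-archiveName", harName, "-p",
      parentPath.toUri().getPath(), glob, archivePath.toString() };
  // HadoopArchives is a Tool, so it can be driven through ToolRunner.
  Assert.assertEquals(0, ToolRunner.run(new HadoopArchives(conf), args));
  // The har filesystem layers on top of the underlying filesystem:
  // har://<scheme>-<host>:<port>/<archive dir>/<name>.har
  final URI uri = fs.getUri();
  return "har://" + uri.getScheme() + "-" + uri.getHost() + ":" + uri.getPort()
      + archivePath.toUri().getPath() + Path.SEPARATOR + harName;
}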
Use of org.apache.hadoop.fs.FsShell in project hadoop by apache: class TestDistCpSyncReverseBase, method testAndVerify.
private void testAndVerify(int numDeletedAndModified) throws Exception {
  SnapshotDiffReport report = dfs.getSnapshotDiffReport(target, "s2", "s1");
  System.out.println(report);
  final FsShell shell = new FsShell(conf);
  lsrSource("Before sync source: ", shell, source);
  lsr("Before sync target: ", shell, target);
  DistCpSync distCpSync = new DistCpSync(options, conf);
  // do the sync
  distCpSync.sync();
  lsr("After sync target: ", shell, target);
  // make sure the source path has been updated to the snapshot path
  final Path spath = new Path(source,
      HdfsConstants.DOT_SNAPSHOT_DIR + Path.SEPARATOR + "s1");
  Assert.assertEquals(spath, options.getSourcePaths().get(0));
  // build copy listing
  final Path listingPath = new Path("/tmp/META/fileList.seq");
  CopyListing listing = new SimpleCopyListing(conf, new Credentials(), distCpSync);
  listing.buildListing(listingPath, options);
  Map<Text, CopyListingFileStatus> copyListing = getListing(listingPath);
  CopyMapper copyMapper = new CopyMapper();
  StubContext stubContext = new StubContext(conf, null, 0);
  Mapper<Text, CopyListingFileStatus, Text, Text>.Context context =
      stubContext.getContext();
  // enable append
  context.getConfiguration().setBoolean(
      DistCpOptionSwitch.APPEND.getConfigLabel(), true);
  copyMapper.setup(context);
  for (Map.Entry<Text, CopyListingFileStatus> entry : copyListing.entrySet()) {
    copyMapper.map(entry.getKey(), entry.getValue(), context);
  }
  // verify that we only list modified and created files/directories
  Assert.assertEquals(numDeletedAndModified, copyListing.size());
  lsr("After Copy target: ", shell, target);
  // verify that the source and target now have the same structure
  verifyCopy(dfs.getFileStatus(spath), dfs.getFileStatus(target), false);
}
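getListing reads back the SequenceFile that SimpleCopyListing wrote at listingPath. A minimal sketch under that assumption is below; the explicit Configuration parameter and the LinkedHashMap (to preserve listing order) are additions for the sketch, not necessarily what the test class does.

import java.util.LinkedHashMap;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.tools.CopyListingFileStatus;

// Hypothetical helper: read the copy listing SequenceFile; keys are the
// relative target paths, values the corresponding file statuses.
static Map<Text, CopyListingFileStatus> getListing(Configuration conf,
    Path listingPath) throws Exception {
  final Map<Text, CopyListingFileStatus> result = new LinkedHashMap<>();
  try (SequenceFile.Reader reader = new SequenceFile.Reader(conf,
      SequenceFile.Reader.file(listingPath))) {
    final Text key = new Text();
    CopyListingFileStatus value = new CopyListingFileStatus();
    while (reader.next(key, value)) {
      result.put(new Text(key), value);    // copy the reused key
      value = new CopyListingFileStatus(); // fresh value for the next record
    }
  }
  return result;
}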
Use of org.apache.hadoop.fs.FsShell in project hadoop by apache: class TestDistCpSyncReverseBase, method syncAndVerify.
private void syncAndVerify() throws Exception {
  final FsShell shell = new FsShell(conf);
  lsrSource("Before sync source: ", shell, source);
  lsr("Before sync target: ", shell, target);
  Assert.assertTrue(sync());
  lsrSource("After sync source: ", shell, source);
  lsr("After sync target: ", shell, target);
  verifyCopy(dfs.getFileStatus(source), dfs.getFileStatus(target), false);
}
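verifyCopy recursively compares the two directory trees after the sync. A sketch of that kind of check, assuming listStatus returns children sorted by name and taking the FileSystem explicitly rather than using the test's dfs field, could look like this:

import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.junit.Assert;

// Hypothetical helper: recursively assert that the source and target trees
// match in type, name (optionally) and file length.
static void verifyCopy(FileSystem fs, FileStatus s, FileStatus t,
    boolean compareName) throws Exception {
  Assert.assertEquals(s.isDirectory(), t.isDirectory());
  if (compareName) {
    Assert.assertEquals(s.getPath().getName(), t.getPath().getName());
  }
  if (!s.isDirectory()) {
    Assert.assertEquals(s.getLen(), t.getLen());
    return;
  }
  final FileStatus[] sChildren = fs.listStatus(s.getPath());
  final FileStatus[] tChildren = fs.listStatus(t.getPath());
  Assert.assertEquals(sChildren.length, tChildren.length);
  for (int i = 0; i < sChildren.length; i++) {
    // listStatus returns entries sorted by name, so children line up by index.
    verifyCopy(fs, sChildren[i], tChildren[i], true);
  }
}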