Example usage of org.apache.hadoop.fs.FsShell from the Apache Hadoop project:
the FsShellRun method of class DFSTestUtil.
/**
 * Runs an FsShell command and verifies its exit code and output.
 *
 * @param cmd     the shell command line to execute
 * @param retcode the expected return code
 * @param contain text expected in the command output
 * @param conf    configuration to base the shell on; copied so the
 *                caller's instance is never mutated
 * @throws Exception if the underlying tool run fails
 */
public static void FsShellRun(String cmd, int retcode, String contain, Configuration conf) throws Exception {
  // Back the shell with a private copy of the configuration.
  toolRun(new FsShell(new Configuration(conf)), cmd, retcode, contain);
}
Example usage of org.apache.hadoop.fs.FsShell from the Apache Hadoop project:
the execute method of class TestDFSShellGenericOptions.
/**
 * Runs FsShell with the given arguments against the named namenode and
 * asserts that the command created the directory {@code /data}.
 * The directory is removed afterwards and the filesystem is closed.
 *
 * Note: only {@link Exception} is caught below, so the AssertionError
 * thrown by a failed assertTrue still propagates and fails the test.
 */
private void execute(String[] args, String namenode) {
  FsShell shell = new FsShell();
  FileSystem fileSys = null;
  try {
    ToolRunner.run(shell, args);
    // Connect using the configuration the shell ended up with.
    fileSys = FileSystem.get(NameNode.getUri(NameNode.getAddress(namenode)), shell.getConf());
    assertTrue("Directory does not get created", fileSys.isDirectory(new Path("/data")));
    // Clean up the directory the command created.
    fileSys.delete(new Path("/data"), true);
  } catch (Exception e) {
    // Deliberately swallowed: failures here surface via the assertion above.
    System.err.println(e.getMessage());
    e.printStackTrace();
  } finally {
    if (fileSys != null) {
      try {
        fileSys.close();
      } catch (IOException ignored) {
        // best-effort close; nothing useful to do on failure
      }
    }
  }
}
Example usage of org.apache.hadoop.fs.FsShell from the Apache Hadoop project:
the testRelativePath method of class TestHadoopArchives.
@Test
public void testRelativePath() throws Exception {
  // Set up a small input tree: input/dir1/a
  final Path dir1 = new Path(inputPath, "dir1");
  fs.mkdirs(dir1);
  createFile(inputPath, fs, dir1.getName(), "a");
  final FsShell shell = new FsShell(conf);
  // List the inputs through the relative path "input".
  final List<String> expected = lsr(shell, "input");
  System.out.println("originalPaths: " + expected);
  // Archive the inputs.
  final String fullHarPathStr = makeArchive();
  // The archive's listing must match the original listing exactly.
  final List<String> actual = lsr(shell, fullHarPathStr);
  Assert.assertEquals(expected, actual);
}
Example usage of org.apache.hadoop.fs.FsShell from the Apache Hadoop project:
the testPathWithSpaces method of class TestHadoopArchives.
@Test
public void testPathWithSpaces() throws Exception {
  // Create files/directories whose names contain spaces.
  createFile(inputPath, fs, "c c");
  final Path spacedDir = new Path(inputPath, "sub 1");
  fs.mkdirs(spacedDir);
  for (String name : new String[] { "file x y z", "file", "x", "y", "z" }) {
    createFile(spacedDir, fs, name);
  }
  // A sibling whose name shares "sub 1" as a prefix.
  final Path suffixedDir = new Path(inputPath, "sub 1 with suffix");
  fs.mkdirs(suffixedDir);
  createFile(suffixedDir, fs, "z");
  final FsShell shell = new FsShell(conf);
  final String inputPathStr = inputPath.toUri().getPath();
  final List<String> before = lsr(shell, inputPathStr);
  // Archive the inputs.
  final String fullHarPathStr = makeArchive();
  // Listing the archive must reproduce the original listing.
  final List<String> after = lsr(shell, fullHarPathStr);
  Assert.assertEquals(before, after);
}
Example usage of org.apache.hadoop.fs.FsShell from the Apache Hadoop project:
the testGlobFiles method of class TestHadoopArchives.
@Test
public void testGlobFiles() throws Exception {
  final Path dir1 = new Path(inputPath, "dir1");
  final Path dir2 = new Path(inputPath, "dir2");
  fs.mkdirs(dir1);
  final String fileName = "a";
  createFile(inputPath, fs, dir1.getName(), fileName);
  createFile(inputPath, fs, dir2.getName(), fileName);
  // "b" does not match the glob, so it must appear in neither listing.
  createFile(inputPath, fs, dir1.getName(), "b");
  final String glob = "dir{1,2}/a";
  final FsShell shell = new FsShell(conf);
  final List<String> expected = lsr(shell, inputPath.toString(), inputPath + "/" + glob);
  System.out.println("originalPaths: " + expected);
  // Archive only the paths matching the glob.
  final String fullHarPathStr = makeArchive(inputPath, glob);
  // The same glob applied inside the archive must yield the same paths.
  final List<String> actual = lsr(shell, fullHarPathStr, fullHarPathStr + "/" + glob);
  Assert.assertEquals(expected, actual);
}
Aggregations