Use of org.apache.hadoop.fs.shell.PathData in project hadoop by apache: class TestFind, method processArgumentsDepthFirstMinDepth.
// check min depth is handled when -depth is specified
@Test
public void processArgumentsDepthFirstMinDepth() throws IOException {
  LinkedList<PathData> items = createDirectories();
  Find find = new Find();
  find.getOptions().setDepthFirst(true);
  find.getOptions().setMinDepth(1);
  find.setConf(conf);
  PrintStream out = mock(PrintStream.class);
  find.getOptions().setOut(out);
  PrintStream err = mock(PrintStream.class);
  find.getOptions().setErr(err);
  Expression expr = mock(Expression.class);
  when(expr.apply((PathData) any(), anyInt())).thenReturn(Result.PASS);
  FileStatusChecker fsCheck = mock(FileStatusChecker.class);
  Expression test = new TestExpression(expr, fsCheck);
  find.setRootExpression(test);
  find.processArguments(items);
  InOrder inOrder = inOrder(expr);
  inOrder.verify(expr).setOptions(find.getOptions());
  inOrder.verify(expr).prepare();
  inOrder.verify(expr).apply(item1aa, 2);
  inOrder.verify(expr).apply(item1a, 1);
  inOrder.verify(expr).apply(item1b, 1);
  inOrder.verify(expr).apply(item5a, 1);
  inOrder.verify(expr).apply(item5b, 1);
  inOrder.verify(expr).apply(item5ca, 2);
  inOrder.verify(expr).apply(item5c, 1);
  inOrder.verify(expr).apply(item5d, 1);
  inOrder.verify(expr).apply(item5e, 1);
  inOrder.verify(expr).finish();
  verifyNoMoreInteractions(expr);
  InOrder inOrderFsCheck = inOrder(fsCheck);
  inOrderFsCheck.verify(fsCheck).check(item1aa.stat);
  inOrderFsCheck.verify(fsCheck).check(item1a.stat);
  inOrderFsCheck.verify(fsCheck).check(item1b.stat);
  inOrderFsCheck.verify(fsCheck).check(item5a.stat);
  inOrderFsCheck.verify(fsCheck).check(item5b.stat);
  inOrderFsCheck.verify(fsCheck).check(item5ca.stat);
  inOrderFsCheck.verify(fsCheck).check(item5c.stat);
  inOrderFsCheck.verify(fsCheck).check(item5d.stat);
  inOrderFsCheck.verify(fsCheck).check(item5e.stat);
  verifyNoMoreInteractions(fsCheck);
  verifyNoMoreInteractions(out);
  verifyNoMoreInteractions(err);
}
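The inOrder verifications above encode two behaviours at once: depth-first traversal applies children before their parents (item1aa before item1a, item5ca before item5c), and setMinDepth(1) keeps the depth-0 arguments themselves from ever reaching the expression. The sketch below reproduces that logic in isolation; the directory layout is an assumption inferred from the verified apply() calls, not the test's actual createDirectories() helper, and the code is not the hadoop Find implementation.

import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class DepthFirstMinDepthSketch {

  // Hypothetical tree, inferred from the apply() calls verified above; the real
  // layout comes from the test's createDirectories() helper, which is not shown here.
  static final Map<String, List<String>> TREE = new LinkedHashMap<>();
  static {
    TREE.put("item1", List.of("item1a", "item1b"));
    TREE.put("item1a", List.of("item1aa"));
    TREE.put("item5", List.of("item5a", "item5b", "item5c", "item5d", "item5e"));
    TREE.put("item5c", List.of("item5ca"));
  }

  // Depth-first: recurse into children before handling the node itself.
  // Min depth: skip anything shallower than the configured minimum.
  static void walk(String node, int depth, int minDepth, List<String> visited) {
    for (String child : TREE.getOrDefault(node, List.of())) {
      walk(child, depth + 1, minDepth, visited);
    }
    if (depth >= minDepth) {
      visited.add(node + " (depth " + depth + ")");
    }
  }

  public static void main(String[] args) {
    List<String> visited = new ArrayList<>();
    walk("item1", 0, 1, visited);   // the depth-0 arguments themselves are filtered out
    walk("item5", 0, 1, visited);
    visited.forEach(System.out::println);
    // Prints item1aa, item1a, item1b, item5a, item5b, item5ca, item5c, item5d, item5e,
    // the same order the test verifies with inOrder.verify(expr).apply(...).
  }
}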
Use of org.apache.hadoop.fs.shell.PathData in project hadoop by apache: class TestIname, method applyGlobNotMatch.
// test a non-matching glob pattern
@Test
public void applyGlobNotMatch() throws IOException {
  setup("n*e");
  PathData item = new PathData("/directory/path/notmatch", mockFs.getConf());
  assertEquals(Result.FAIL, name.apply(item, -1));
}
Use of org.apache.hadoop.fs.shell.PathData in project hadoop by apache: class TestName, method applyGlob.
// test a matching glob pattern
@Test
public void applyGlob() throws IOException {
  setup("n*e");
  PathData item = new PathData("/directory/path/name", mockFs.getConf());
  assertEquals(Result.PASS, name.apply(item, -1));
}
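The Iname and Name cases above hinge on the same point: the glob is matched against the final component of the path, so "n*e" rejects /directory/path/notmatch and accepts /directory/path/name. Iname only makes the comparison case-insensitive, which changes nothing here because "notmatch" fails on its characters, not its case. A standalone illustration of those glob semantics using the JDK's PathMatcher rather than the hadoop expression classes:

import java.nio.file.FileSystems;
import java.nio.file.PathMatcher;
import java.nio.file.Paths;

public class GlobSketch {
  public static void main(String[] args) {
    // JDK glob matching, used here only to illustrate the pattern semantics.
    PathMatcher glob = FileSystems.getDefault().getPathMatcher("glob:n*e");

    // Match against the last path component, which is what a name test inspects.
    System.out.println(glob.matches(Paths.get("/directory/path/notmatch").getFileName()));  // false
    System.out.println(glob.matches(Paths.get("/directory/path/name").getFileName()));      // true
  }
}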
Use of org.apache.hadoop.fs.shell.PathData in project hadoop by apache: class TestPrint, method testPrint.
// test the full path is printed to stdout
@Test
public void testPrint() throws IOException {
  Print print = new Print();
  PrintStream out = mock(PrintStream.class);
  FindOptions options = new FindOptions();
  options.setOut(out);
  print.setOptions(options);
  String filename = "/one/two/test";
  PathData item = new PathData(filename, mockFs.getConf());
  assertEquals(Result.PASS, print.apply(item, -1));
  verify(out).print(filename + '\n');
  verifyNoMoreInteractions(out);
}
Use of org.apache.hadoop.fs.shell.PathData in project hadoop by apache: class TestPrint0, method testPrint.
// test the full path is printed to stdout with a '\0'
@Test
public void testPrint() throws IOException {
  Print.Print0 print = new Print.Print0();
  PrintStream out = mock(PrintStream.class);
  FindOptions options = new FindOptions();
  options.setOut(out);
  print.setOptions(options);
  String filename = "/one/two/test";
  PathData item = new PathData(filename, mockFs.getConf());
  assertEquals(Result.PASS, print.apply(item, -1));
  verify(out).print(filename + '\0');
  verifyNoMoreInteractions(out);
}
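TestPrint and TestPrint0 differ only in the terminator that follows the path: '\n' for Print and '\0' for Print.Print0, the latter being the conventional pairing with tools such as xargs -0 when file names may contain newlines. A minimal sketch of that shared shape, assuming nothing beyond what the two tests verify (it is not the actual hadoop Print implementation):

import java.io.PrintStream;

// Hypothetical print-style expression: write the full path plus a terminator to the
// configured output stream and always "pass" so no item is filtered out.
class PrintSketch {
  private final PrintStream out;
  private final char terminator;   // '\n' for a -print style, '\0' for a -print0 style

  PrintSketch(PrintStream out, char terminator) {
    this.out = out;
    this.terminator = terminator;
  }

  boolean apply(String fullPath) {
    out.print(fullPath + terminator);   // matches what verify(out).print(...) checks above
    return true;                        // corresponds to Result.PASS in the tests
  }
}

For instance, new PrintSketch(System.out, '\0').apply("/one/two/test") emits the path followed by a NUL byte.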