Usage of org.apache.hadoop.fs.FsShell in the Apache Hadoop project: class TestFileTruncate, method runTruncateShellCommand.
/**
 * Creates {@code src} with {@code oldLength} bytes of test data, then invokes the
 * {@code FsShell} truncate command described by {@code shellOpts} and asserts that
 * the shell exits with status 0.
 */
private void runTruncateShellCommand(Path src, int oldLength, String[] shellOpts) throws Exception {
// Populate the source file and sanity-check its length before truncating.
final byte[] contents = AppendTestUtil.initBuffer(oldLength);
writeContents(contents, oldLength, src);
assertThat(fs.getFileStatus(src).getLen(), is((long) oldLength));
// Run the truncate shell command; the finally block guarantees the shell
// (and its cached FileSystem instances) is closed even on assertion failure.
final FsShell shell = new FsShell(conf);
try {
assertThat(ToolRunner.run(shell, shellOpts), is(0));
} finally {
shell.close();
}
}
Usage of org.apache.hadoop.fs.FsShell in the Apache Hadoop project: class TestNestedEncryptionZones, method testNestedEZWithRoot.
@Test(timeout = 60000)
public void testNestedEZWithRoot() throws Exception {
// Set up a top-level EZ on the root dir with a nested EZ inside it.
initTopEZDirAndNestedEZDir(rootDir);
verifyEncryption();
// test rename file
renameChildrenOfEZ();
final String currentUser = UserGroupInformation.getCurrentUser().getShortUserName();
final Path suffixTrashPath = new Path(FileSystem.TRASH_PREFIX, currentUser);
final Path rootTrash = fs.getTrashRoot(rootDir);
final Path topEZTrash = fs.getTrashRoot(topEZFile);
final Path nestedEZTrash = fs.getTrashRoot(nestedEZFile);
final Path expectedTopEZTrash = fs.makeQualified(new Path(topEZDir, suffixTrashPath));
final Path expectedNestedEZTrash = fs.makeQualified(new Path(nestedEZDir, suffixTrashPath));
assertEquals("Top ez trash should be " + expectedTopEZTrash, expectedTopEZTrash, topEZTrash);
assertEquals("Root trash should be equal with TopEZFile trash", topEZTrash, rootTrash);
assertEquals("Nested ez Trash should be " + expectedNestedEZTrash, expectedNestedEZTrash, nestedEZTrash);
// delete rename file and test trash
FsShell shell = new FsShell(fs.getConf());
try {
final Path topTrashFile = new Path(shell.getCurrentTrashDir(topEZFile) + "/" + topEZFile);
final Path nestedTrashFile = new Path(shell.getCurrentTrashDir(nestedEZFile) + "/" + nestedEZFile);
// BUGFIX: the exit status of the rm commands was previously ignored, so a
// failed delete surfaced only as a misleading "File not in trash" failure.
assertEquals("rm of top EZ file should succeed", 0,
ToolRunner.run(shell, new String[] { "-rm", topEZFile.toString() }));
assertEquals("rm of nested EZ file should succeed", 0,
ToolRunner.run(shell, new String[] { "-rm", nestedEZFile.toString() }));
assertTrue("File not in trash : " + topTrashFile, fs.exists(topTrashFile));
assertTrue("File not in trash : " + nestedTrashFile, fs.exists(nestedTrashFile));
} finally {
// BUGFIX: the FsShell was previously leaked; close it as the other FsShell
// call sites in this codebase do.
shell.close();
}
}
Usage of org.apache.hadoop.fs.FsShell in the Apache Hadoop project: class Gridmix, method writeInputData.
/**
 * Write random bytes at the path {@code inputDir} if needed.
 * @param genbytes number of bytes of input data to generate; a value of zero
 *                 or less makes this call a no-op that returns 0
 * @param inputDir target directory for the generated data; must not already exist
 * @return exit status: 0 on success, {@code STARTUP_FAILED_ERROR} if
 *         {@code inputDir} already exists
 * @throws IOException if data generation or the permission change fails
 * @throws InterruptedException if the generation job is interrupted
 * @see org.apache.hadoop.mapred.gridmix.GenerateData
 */
protected int writeInputData(long genbytes, Path inputDir) throws IOException, InterruptedException {
if (genbytes > 0) {
final Configuration conf = getConf();
if (inputDir.getFileSystem(conf).exists(inputDir)) {
LOG.error("Gridmix input data directory " + inputDir + " already exists when -generate option is used.\n");
return STARTUP_FAILED_ERROR;
}
// configure the compression ratio if needed
CompressionEmulationUtil.setupDataGeneratorConfig(conf);
final GenerateData genData = new GenerateData(conf, inputDir, genbytes);
LOG.info("Generating " + StringUtils.humanReadableInt(genbytes) + " of test data...");
launchGridmixJob(genData);
// Open the generated directory to everyone; the input may be read by jobs
// running as other users.
FsShell shell = new FsShell(conf);
try {
LOG.info("Changing the permissions for inputPath " + inputDir.toString());
final int exitCode;
try {
exitCode = shell.run(new String[] { "-chmod", "-R", "777", inputDir.toString() });
} catch (Exception e) {
LOG.error("Couldnt change the file permissions ", e);
throw new IOException(e);
}
// BUGFIX: the shell's exit status was previously ignored, so a chmod
// failure that returned non-zero without throwing was silently swallowed.
if (exitCode != 0) {
throw new IOException("chmod -R 777 " + inputDir + " failed with exit status " + exitCode);
}
} finally {
// BUGFIX: the FsShell (and its cached FileSystem objects) was leaked.
shell.close();
}
LOG.info("Input data generation successful.");
}
return 0;
}
Usage of org.apache.hadoop.fs.FsShell in the Apache Hadoop project: class TestHadoopArchives, method testSingleFile.
@Test
public void testSingleFile() throws Exception {
// Arrange: a directory "dir1" under the input path holding one file named "a".
final String singleFileName = "a";
final Path dir = new Path(inputPath, "dir1");
fs.mkdirs(dir);
createFile(inputPath, fs, dir.getName(), singleFileName);
// Record the recursive listing of the source directory.
final FsShell shell = new FsShell(conf);
final List<String> originalPaths = lsr(shell, dir.toString());
System.out.println("originalPaths: " + originalPaths);
// make the archive:
final String fullHarPathStr = makeArchive(dir, singleFileName);
// Assert: the archive's recursive listing matches the original directory's.
final List<String> archivedPaths = lsr(shell, fullHarPathStr);
Assert.assertEquals(originalPaths, archivedPaths);
}
Usage of org.apache.hadoop.fs.FsShell in the Apache Hadoop project: class TestHadoopArchives, method testRelativePathWitRepl.
@Test
public void testRelativePathWitRepl() throws Exception {
// Arrange: a directory "dir1" under the input path containing one file "a".
final Path dir = new Path(inputPath, "dir1");
fs.mkdirs(dir);
createFile(inputPath, fs, dir.getName(), "a");
// Record the recursive listing of the relative path "input".
final FsShell shell = new FsShell(conf);
final List<String> originalPaths = lsr(shell, "input");
System.out.println("originalPaths: " + originalPaths);
// make the archive:
final String fullHarPathStr = makeArchiveWithRepl();
// Assert: archiving with an explicit replication factor preserves the layout.
final List<String> archivedPaths = lsr(shell, fullHarPathStr);
Assert.assertEquals(originalPaths, archivedPaths);
}
Aggregations