Search in sources :

Example 76 with Configuration

use of org.apache.hadoop.conf.Configuration in project hadoop by apache.

From the class TestHarFileSystemBasics, the method testPositiveNewHarFsOnTheSameUnderlyingFs:

@Test
public void testPositiveNewHarFsOnTheSameUnderlyingFs() throws Exception {
    // Spin up a second HarFileSystem over the same underlying local FS so
    // that the already-cached archive metadata is picked up on initialize.
    final URI harUri = new URI("har://" + harPath.toString());
    final HarFileSystem secondHarFs = new HarFileSystem(localFileSystem);
    secondHarFs.initialize(harUri, new Configuration());
    // Identity (==) comparison on purpose: both instances must share the
    // very same metadata object served out of the cache.
    assertTrue(secondHarFs.getMetadata() == harFileSystem.getMetadata());
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) URI(java.net.URI) Test(org.junit.Test)

Example 77 with Configuration

use of org.apache.hadoop.conf.Configuration in project hadoop by apache.

From the class TestHarFileSystemBasics, the method testPositiveInitWithoutUnderlyingFS:

@Test
public void testPositiveInitWithoutUnderlyingFS() throws Exception {
    // Use the no-arg constructor: the underlying FS object must then be
    // resolved lazily (created on demand or fetched from the cache) inside
    // the #initialize() call. Success is simply that initialize() returns.
    final URI harUri = new URI("har://" + harPath.toString());
    final HarFileSystem harFs = new HarFileSystem();
    harFs.initialize(harUri, new Configuration());
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) URI(java.net.URI) Test(org.junit.Test)

Example 78 with Configuration

use of org.apache.hadoop.conf.Configuration in project hadoop by apache.

From the class TestHarFileSystemBasics, the method testListLocatedStatus:

@Test
public void testListLocatedStatus() throws Exception {
    // Open the pre-built archive bundled with the tests as a classpath resource.
    String testHarPath = this.getClass().getResource("/test.har").getPath();
    HarFileSystem hfs = new HarFileSystem(localFileSystem);
    hfs.initialize(new URI("har://" + testHarPath), new Configuration());

    // test.har has the following contents:
    //   dir1/1.txt
    //   dir1/2.txt
    Set<String> remaining = new HashSet<String>();
    remaining.add("1.txt");
    remaining.add("2.txt");

    // Walk the located statuses of dir1, ticking each file off the set as
    // it is seen; anything not in the set is unexpected.
    RemoteIterator<LocatedFileStatus> statuses = hfs.listLocatedStatus(new Path("dir1"));
    while (statuses.hasNext()) {
        final String name = statuses.next().getPath().getName();
        assertTrue(name + " not in expected files list", remaining.contains(name));
        remaining.remove(name);
    }
    // An empty set means every expected file was listed exactly once.
    assertEquals("Didn't find all of the expected file names: " + remaining, 0, remaining.size());
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) URI(java.net.URI) HashSet(java.util.HashSet) Test(org.junit.Test)

Example 79 with Configuration

use of org.apache.hadoop.conf.Configuration in project hadoop by apache.

From the class TestFsShellReturnCode, the method testInterrupt:

@Test(timeout = 30000)
public void testInterrupt() throws Exception {
    MyFsShell shell = new MyFsShell();
    shell.setConf(new Configuration());

    // Lay down a directory with two files for the interrupting command.
    final Path dir = new Path(TEST_ROOT_DIR, "testInterrupt");
    final Path file1 = new Path(dir, "f1");
    final Path file2 = new Path(dir, "f2");
    assertTrue(fileSys.mkdirs(dir));
    writeFile(fileSys, file1);
    assertTrue(fileSys.isFile(file1));
    writeFile(fileSys, file2);
    assertTrue(fileSys.isFile(file2));

    // Processing a file raises an interrupt, so the run should abort on the
    // first file: exactly one processed, exit code 130 (SIGINT convention).
    int exitCode = shell.run(new String[] { "-testInterrupt", file1.toString(), file2.toString() });
    assertEquals(1, InterruptCommand.processed);
    assertEquals(130, exitCode);

    // Passing the directory instead: the shell descends into it and the
    // interrupt fires on the first child, bringing the running total to two.
    exitCode = shell.run(new String[] { "-testInterrupt", dir.toString() });
    assertEquals(2, InterruptCommand.processed);
    assertEquals(130, exitCode);
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) Test(org.junit.Test)

Example 80 with Configuration

use of org.apache.hadoop.conf.Configuration in project hadoop by apache.

From the class TestFsShellTouch, the method setup:

@BeforeClass
public static void setup() throws Exception {
    // Shared fixture: an FsShell and a local FS rooted at a fresh temp dir.
    final Configuration conf = new Configuration();
    shell = new FsShell(conf);
    lfs = FileSystem.getLocal(conf);
    // Qualify the temp path against the local FS so later comparisons use
    // fully-qualified URIs, then anchor the working directory there.
    final Path tempRoot = new Path(GenericTestUtils.getTempPath("testFsShell"));
    testRootDir = lfs.makeQualified(tempRoot);
    lfs.mkdirs(testRootDir);
    lfs.setWorkingDirectory(testRootDir);
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) BeforeClass(org.junit.BeforeClass)

Aggregations

Configuration (org.apache.hadoop.conf.Configuration)5973 Test (org.junit.Test)3243 Path (org.apache.hadoop.fs.Path)1602 FileSystem (org.apache.hadoop.fs.FileSystem)903 IOException (java.io.IOException)850 YarnConfiguration (org.apache.hadoop.yarn.conf.YarnConfiguration)727 HdfsConfiguration (org.apache.hadoop.hdfs.HdfsConfiguration)517 MiniDFSCluster (org.apache.hadoop.hdfs.MiniDFSCluster)502 File (java.io.File)499 HBaseConfiguration (org.apache.hadoop.hbase.HBaseConfiguration)388 ArrayList (java.util.ArrayList)360 URI (java.net.URI)319 BeforeClass (org.junit.BeforeClass)275 Job (org.apache.hadoop.mapreduce.Job)272 Before (org.junit.Before)264 DistributedFileSystem (org.apache.hadoop.hdfs.DistributedFileSystem)219 FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream)203 HashMap (java.util.HashMap)192 FileStatus (org.apache.hadoop.fs.FileStatus)190 Properties (java.util.Properties)187