Search in sources :

Example 61 with HdfsConfiguration

use of org.apache.hadoop.hdfs.HdfsConfiguration in project hadoop by apache.

Source: class TestSymlinkHdfsDisable, method testSymlinkHdfsDisable.

@Test(timeout = 60000)
public void testSymlinkHdfsDisable() throws Exception {
    Configuration conf = new HdfsConfiguration();
    // Disable client-side symlink resolution so any resolution attempt fails.
    conf.setBoolean(CommonConfigurationKeys.FS_CLIENT_RESOLVE_REMOTE_SYMLINKS_KEY, false);
    // Spin up a minicluster and obtain both a DFS handle and a FileContext.
    MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).build();
    try {
        DistributedFileSystem dfs = cluster.getFileSystem();
        FileContext fc = FileContext.getFileContext(cluster.getURI(0), conf);
        // Create a regular file and a symlink pointing at it.
        FileContextTestHelper helper = new FileContextTestHelper("/tmp/TestSymlinkHdfsDisable");
        Path root = helper.getTestRootPath(fc);
        Path target = new Path(root, "target");
        Path link = new Path(root, "link");
        DFSTestUtil.createFile(dfs, target, 4096, (short) 1, 0xDEADDEAD);
        fc.createSymlink(target, link, false);
        // Resolution must fail through both the FileContext and FileSystem APIs.
        try {
            fc.open(link);
            fail("Expected error when attempting to resolve link");
        } catch (IOException e) {
            GenericTestUtils.assertExceptionContains("resolution is disabled", e);
        }
        try {
            dfs.open(link);
            fail("Expected error when attempting to resolve link");
        } catch (IOException e) {
            GenericTestUtils.assertExceptionContains("resolution is disabled", e);
        }
    } finally {
        // The original leaked the minicluster; always tear it down.
        cluster.shutdown();
    }
}
Also used : MiniDFSCluster(org.apache.hadoop.hdfs.MiniDFSCluster) HdfsConfiguration(org.apache.hadoop.hdfs.HdfsConfiguration) Configuration(org.apache.hadoop.conf.Configuration) IOException(java.io.IOException) HdfsConfiguration(org.apache.hadoop.hdfs.HdfsConfiguration) DistributedFileSystem(org.apache.hadoop.hdfs.DistributedFileSystem) Test(org.junit.Test)

Example 62 with HdfsConfiguration

use of org.apache.hadoop.hdfs.HdfsConfiguration in project hadoop by apache.

Source: class TestUrlStreamHandler, method testFileUrls.

/**
   * Test opening and reading from an InputStream through a file:// URL.
   *
   * @throws IOException
   * @throws URISyntaxException
   */
@Test
public void testFileUrls() throws IOException, URISyntaxException {
    // The URLStreamHandler is already installed in this JVM by testDfsUrls().
    Configuration conf = new HdfsConfiguration();
    // Locate (and if necessary create) the test temporary directory.
    if (!TEST_ROOT_DIR.exists()) {
        if (!TEST_ROOT_DIR.mkdirs())
            throw new IOException("Cannot create temporary directory: " + TEST_ROOT_DIR);
    }
    File tmpFile = new File(TEST_ROOT_DIR, "thefile");
    URI uri = tmpFile.toURI();
    FileSystem fs = FileSystem.get(uri, conf);
    try {
        byte[] fileContent = new byte[1024];
        for (int i = 0; i < fileContent.length; ++i) fileContent[i] = (byte) i;
        // First create the file through the FileSystem API.
        // try-with-resources guarantees the stream is closed even if a
        // write or assertion fails (the original leaked it on failure).
        try (OutputStream os = fs.create(new Path(uri.getPath()))) {
            os.write(fileContent);
        }
        // Second, open and read the file content back through the URL API.
        URL fileURL = uri.toURL();
        byte[] bytes = new byte[4096];
        try (InputStream is = fileURL.openStream()) {
            assertNotNull(is);
            assertEquals(1024, is.read(bytes));
        }
        for (int i = 0; i < fileContent.length; ++i) assertEquals(fileContent[i], bytes[i]);
        // Cleanup: delete the file.
        fs.delete(new Path(uri.getPath()), false);
    } finally {
        fs.close();
    }
}
Also used : HdfsConfiguration(org.apache.hadoop.hdfs.HdfsConfiguration) Configuration(org.apache.hadoop.conf.Configuration) InputStream(java.io.InputStream) OutputStream(java.io.OutputStream) IOException(java.io.IOException) HdfsConfiguration(org.apache.hadoop.hdfs.HdfsConfiguration) File(java.io.File) URI(java.net.URI) URL(java.net.URL) Test(org.junit.Test)

Example 63 with HdfsConfiguration

use of org.apache.hadoop.hdfs.HdfsConfiguration in project hadoop by apache.

Source: class TestUrlStreamHandler, method testDfsUrls.

/**
   * Test opening and reading from an InputStream through a hdfs:// URL.
   * <p>
   * First generate a file with some content through the FileSystem API, then
   * try to open and read the file through the URL stream API.
   *
   * @throws IOException
   */
@Test
public void testDfsUrls() throws IOException {
    Configuration conf = new HdfsConfiguration();
    MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
    FileSystem fs = cluster.getFileSystem();
    // Install our own URL stream handler factory.
    // setURLStreamHandlerFactory can be called at most once per JVM, so the
    // handler registered here serves all test cases in this class.
    FsUrlStreamHandlerFactory factory = new org.apache.hadoop.fs.FsUrlStreamHandlerFactory();
    java.net.URL.setURLStreamHandlerFactory(factory);
    Path filePath = new Path("/thefile");
    try {
        byte[] fileContent = new byte[1024];
        for (int i = 0; i < fileContent.length; ++i) fileContent[i] = (byte) i;
        // First create the file through the FileSystem API.
        try (OutputStream os = fs.create(filePath)) {
            os.write(fileContent);
        }
        // Second, open and read the file content back through the URL API.
        URI uri = fs.getUri();
        URL fileURL = new URL(uri.getScheme(), uri.getHost(), uri.getPort(), filePath.toString());
        byte[] bytes = new byte[4096];
        try (InputStream is = fileURL.openStream()) {
            assertNotNull(is);
            // A single read() on a network stream may legally return fewer
            // bytes than requested; loop until EOF so the test is not flaky.
            int total = 0;
            int n;
            while ((n = is.read(bytes, total, bytes.length - total)) > 0) {
                total += n;
            }
            assertEquals(1024, total);
        }
        for (int i = 0; i < fileContent.length; ++i) assertEquals(fileContent[i], bytes[i]);
        // Cleanup: delete the file.
        fs.delete(filePath, false);
    } finally {
        fs.close();
        cluster.shutdown();
    }
}
Also used : MiniDFSCluster(org.apache.hadoop.hdfs.MiniDFSCluster) HdfsConfiguration(org.apache.hadoop.hdfs.HdfsConfiguration) Configuration(org.apache.hadoop.conf.Configuration) InputStream(java.io.InputStream) OutputStream(java.io.OutputStream) HdfsConfiguration(org.apache.hadoop.hdfs.HdfsConfiguration) URI(java.net.URI) URL(java.net.URL) Test(org.junit.Test)

Example 64 with HdfsConfiguration

use of org.apache.hadoop.hdfs.HdfsConfiguration in project hadoop by apache.

Source: class TestResolveHdfsSymlink, method setUp.

@BeforeClass
public static void setUp() throws IOException {
    // Build a minicluster with the "always use delegation token" NameNode
    // setting enabled, and wait for it to come fully up before any test runs.
    Configuration clusterConf = new HdfsConfiguration();
    clusterConf.setBoolean(DFSConfigKeys.DFS_NAMENODE_DELEGATION_TOKEN_ALWAYS_USE_KEY, true);
    cluster = new MiniDFSCluster.Builder(clusterConf).build();
    cluster.waitActive();
}
Also used : HdfsConfiguration(org.apache.hadoop.hdfs.HdfsConfiguration) Configuration(org.apache.hadoop.conf.Configuration) HdfsConfiguration(org.apache.hadoop.hdfs.HdfsConfiguration) BeforeClass(org.junit.BeforeClass)

Example 65 with HdfsConfiguration

use of org.apache.hadoop.hdfs.HdfsConfiguration in project hadoop by apache.

Source: class TestSymlinkHdfs, method beforeClassSetup.

@BeforeClass
public static void beforeClassSetup() throws Exception {
    // Configure a fully permissive umask and disable the NameNode's
    // path-component length limit (0), then stand up the minicluster and
    // grab both a WebHDFS handle and a plain DFS handle for the tests.
    Configuration clusterConf = new HdfsConfiguration();
    clusterConf.set(FsPermission.UMASK_LABEL, "000");
    clusterConf.setInt(DFSConfigKeys.DFS_NAMENODE_MAX_COMPONENT_LENGTH_KEY, 0);
    cluster = new MiniDFSCluster.Builder(clusterConf).build();
    webhdfs = WebHdfsTestUtil.getWebHdfsFileSystem(clusterConf, WebHdfsConstants.WEBHDFS_SCHEME);
    dfs = cluster.getFileSystem();
}
Also used : HdfsConfiguration(org.apache.hadoop.hdfs.HdfsConfiguration) Configuration(org.apache.hadoop.conf.Configuration) HdfsConfiguration(org.apache.hadoop.hdfs.HdfsConfiguration) BeforeClass(org.junit.BeforeClass)

Aggregations

HdfsConfiguration (org.apache.hadoop.hdfs.HdfsConfiguration)454 Configuration (org.apache.hadoop.conf.Configuration)311 Test (org.junit.Test)311 MiniDFSCluster (org.apache.hadoop.hdfs.MiniDFSCluster)267 Path (org.apache.hadoop.fs.Path)152 FileSystem (org.apache.hadoop.fs.FileSystem)94 DistributedFileSystem (org.apache.hadoop.hdfs.DistributedFileSystem)92 File (java.io.File)72 IOException (java.io.IOException)69 Before (org.junit.Before)56 ExtendedBlock (org.apache.hadoop.hdfs.protocol.ExtendedBlock)40 FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream)35 MetricsRecordBuilder (org.apache.hadoop.metrics2.MetricsRecordBuilder)33 DataNode (org.apache.hadoop.hdfs.server.datanode.DataNode)30 LocatedBlock (org.apache.hadoop.hdfs.protocol.LocatedBlock)27 RandomAccessFile (java.io.RandomAccessFile)22 ArrayList (java.util.ArrayList)20 NameNodeFile (org.apache.hadoop.hdfs.server.namenode.NNStorage.NameNodeFile)20 URI (java.net.URI)19 FsPermission (org.apache.hadoop.fs.permission.FsPermission)19