Search in sources :

Example 1 with BlockStoragePolicySpi

Use of org.apache.hadoop.fs.BlockStoragePolicySpi in project hadoop by apache.

Class DistributedFileSystem, method getStoragePolicy.

@Override
public BlockStoragePolicySpi getStoragePolicy(Path path) throws IOException {
    statistics.incrementReadOps(1);
    storageStatistics.incrementOpCounter(OpType.GET_STORAGE_POLICY);
    Path absF = fixRelativePart(path);
    return new FileSystemLinkResolver<BlockStoragePolicySpi>() {

        @Override
        public BlockStoragePolicySpi doCall(final Path p) throws IOException {
            return getClient().getStoragePolicy(getPathName(p));
        }

        @Override
        public BlockStoragePolicySpi next(final FileSystem fs, final Path p) throws IOException {
            return fs.getStoragePolicy(p);
        }
    }.resolve(this, absF);
}
Also used : Path(org.apache.hadoop.fs.Path) FileSystem(org.apache.hadoop.fs.FileSystem) BlockStoragePolicySpi(org.apache.hadoop.fs.BlockStoragePolicySpi) IOException(java.io.IOException)
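
For context, here is a minimal caller-side sketch of the same API, assuming an HDFS-backed default file system (local file systems do not support storage policies) and a hypothetical, already existing path /data/reports. It reads the effective policy through the generic FileSystem entry point shown above and prints what BlockStoragePolicySpi exposes.

import java.util.Arrays;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockStoragePolicySpi;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class ShowStoragePolicy {
    public static void main(String[] args) throws Exception {
        // Assumes fs.defaultFS points at an HDFS cluster.
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(conf);
        // Hypothetical path, assumed to exist.
        Path p = new Path("/data/reports");
        BlockStoragePolicySpi policy = fs.getStoragePolicy(p);
        // getName() is the symbolic name (e.g. HOT, WARM, COLD); getStorageTypes()
        // lists the storage media used for new block replicas under this policy.
        System.out.println(policy.getName() + " -> "
                + Arrays.toString(policy.getStorageTypes()));
        fs.close();
    }
}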

Example 2 with BlockStoragePolicySpi

Use of org.apache.hadoop.fs.BlockStoragePolicySpi in project hadoop by apache.

Class BaseTestHttpFSWith, method testStoragePolicy.

private void testStoragePolicy() throws Exception {
    Assume.assumeFalse("Assume it's not a local FS", isLocalFS());
    FileSystem fs = FileSystem.get(getProxiedFSConf());
    fs.mkdirs(getProxiedFSTestDir());
    Path path = new Path(getProxiedFSTestDir(), "policy.txt");
    FileSystem httpfs = getHttpFSFileSystem();
    // test getAllStoragePolicies
    BlockStoragePolicy[] dfsPolicies = (BlockStoragePolicy[]) fs.getAllStoragePolicies().toArray();
    BlockStoragePolicy[] httpPolicies = (BlockStoragePolicy[]) httpfs.getAllStoragePolicies().toArray();
    Assert.assertArrayEquals("Policy array returned from the DFS and HttpFS should be equal", dfsPolicies, httpPolicies);
    // test get/set/unset policies
    DFSTestUtil.createFile(fs, path, 0, (short) 1, 0L);
    // get defaultPolicy
    BlockStoragePolicySpi defaultdfsPolicy = fs.getStoragePolicy(path);
    // set policy through webhdfs
    httpfs.setStoragePolicy(path, HdfsConstants.COLD_STORAGE_POLICY_NAME);
    // get policy from dfs
    BlockStoragePolicySpi dfsPolicy = fs.getStoragePolicy(path);
    // get policy from webhdfs
    BlockStoragePolicySpi httpFsPolicy = httpfs.getStoragePolicy(path);
    Assert.assertEquals("Storage policy returned from the get API should" + " be same as set policy", HdfsConstants.COLD_STORAGE_POLICY_NAME.toString(), httpFsPolicy.getName());
    Assert.assertEquals("Storage policy returned from the DFS and HttpFS should be equals", httpFsPolicy, dfsPolicy);
    // unset policy
    httpfs.unsetStoragePolicy(path);
    Assert.assertEquals("After unset storage policy, the get API shoudld" + " return the default policy", defaultdfsPolicy, httpfs.getStoragePolicy(path));
    fs.close();
}
Also used : Path(org.apache.hadoop.fs.Path) FileSystem(org.apache.hadoop.fs.FileSystem) BlockStoragePolicy(org.apache.hadoop.hdfs.protocol.BlockStoragePolicy) BlockStoragePolicySpi(org.apache.hadoop.fs.BlockStoragePolicySpi)
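
The test above drives the get/set/unset cycle through HttpFS. As a rough sketch of the same cycle in application code, assuming a plain HDFS FileSystem, a hypothetical file path, and a Hadoop release in which FileSystem exposes unsetStoragePolicy:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockStoragePolicySpi;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;

public class PolicyRoundTrip {
    public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        // Hypothetical path; create an empty file so the policy calls have a target.
        Path file = new Path("/tmp/policy-demo.txt");
        fs.create(file).close();

        BlockStoragePolicySpi before = fs.getStoragePolicy(file);
        // Policies are addressed by name; HdfsConstants provides the standard names.
        fs.setStoragePolicy(file, HdfsConstants.COLD_STORAGE_POLICY_NAME);
        System.out.println("now: " + fs.getStoragePolicy(file).getName());

        // unsetStoragePolicy removes the explicit setting, so the path falls back
        // to whatever it inherits (the default policy if no ancestor sets one).
        fs.unsetStoragePolicy(file);
        System.out.println("back to: " + fs.getStoragePolicy(file).getName()
                + " (was " + before.getName() + ")");
        fs.close();
    }
}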

Example 3 with BlockStoragePolicySpi

Use of org.apache.hadoop.fs.BlockStoragePolicySpi in project hadoop by apache.

Class TestHdfsAdmin, method testHdfsAdminStoragePolicies.

/**
   * Test that we can set, get, unset storage policies via {@link HdfsAdmin}.
   */
@Test
public void testHdfsAdminStoragePolicies() throws Exception {
    HdfsAdmin hdfsAdmin = new HdfsAdmin(FileSystem.getDefaultUri(conf), conf);
    FileSystem fs = FileSystem.get(conf);
    final Path foo = new Path("/foo");
    final Path bar = new Path(foo, "bar");
    final Path wow = new Path(bar, "wow");
    DFSTestUtil.createFile(fs, wow, SIZE, REPL, 0);
    final BlockStoragePolicySuite suite = BlockStoragePolicySuite.createDefaultSuite();
    final BlockStoragePolicy warm = suite.getPolicy("WARM");
    final BlockStoragePolicy cold = suite.getPolicy("COLD");
    final BlockStoragePolicy hot = suite.getPolicy("HOT");
    /*
     * test: set storage policy
     */
    hdfsAdmin.setStoragePolicy(foo, warm.getName());
    hdfsAdmin.setStoragePolicy(bar, cold.getName());
    hdfsAdmin.setStoragePolicy(wow, hot.getName());
    /*
     * test: get storage policy after set
     */
    assertEquals(hdfsAdmin.getStoragePolicy(foo), warm);
    assertEquals(hdfsAdmin.getStoragePolicy(bar), cold);
    assertEquals(hdfsAdmin.getStoragePolicy(wow), hot);
    /*
     * test: unset storage policy
     */
    hdfsAdmin.unsetStoragePolicy(foo);
    hdfsAdmin.unsetStoragePolicy(bar);
    hdfsAdmin.unsetStoragePolicy(wow);
    /*
     * test: get storage policy after unset. HOT by default.
     */
    assertEquals(hdfsAdmin.getStoragePolicy(foo), hot);
    assertEquals(hdfsAdmin.getStoragePolicy(bar), hot);
    assertEquals(hdfsAdmin.getStoragePolicy(wow), hot);
    /*
     * test: get all storage policies
     */
    // Get policies via HdfsAdmin
    Set<String> policyNamesSet1 = new HashSet<>();
    for (BlockStoragePolicySpi policy : hdfsAdmin.getAllStoragePolicies()) {
        policyNamesSet1.add(policy.getName());
    }
    // Get policies via BlockStoragePolicySuite
    Set<String> policyNamesSet2 = new HashSet<>();
    for (BlockStoragePolicy policy : suite.getAllPolicies()) {
        policyNamesSet2.add(policy.getName());
    }
    // Ensure that we got the same set of policies in both cases.
    Assert.assertTrue(Sets.difference(policyNamesSet1, policyNamesSet2).isEmpty());
    Assert.assertTrue(Sets.difference(policyNamesSet2, policyNamesSet1).isEmpty());
}
Also used : Path(org.apache.hadoop.fs.Path) BlockStoragePolicySuite(org.apache.hadoop.hdfs.server.blockmanagement.BlockStoragePolicySuite) HdfsAdmin(org.apache.hadoop.hdfs.client.HdfsAdmin) FileSystem(org.apache.hadoop.fs.FileSystem) BlockStoragePolicy(org.apache.hadoop.hdfs.protocol.BlockStoragePolicy) BlockStoragePolicySpi(org.apache.hadoop.fs.BlockStoragePolicySpi) HashSet(java.util.HashSet) Test(org.junit.Test)
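
Outside of tests, the same operations can be driven through HdfsAdmin directly. A minimal sketch, assuming a reachable cluster in the default configuration and a hypothetical /archive directory that already exists (the "COLD" name matches the policy used above):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockStoragePolicySpi;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.client.HdfsAdmin;

public class AdminPolicyDemo {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // HdfsAdmin wraps admin-oriented operations for the cluster at the given URI.
        HdfsAdmin admin = new HdfsAdmin(FileSystem.getDefaultUri(conf), conf);
        // Hypothetical directory, assumed to exist.
        Path archive = new Path("/archive");
        admin.setStoragePolicy(archive, "COLD");
        BlockStoragePolicySpi effective = admin.getStoragePolicy(archive);
        System.out.println(archive + " -> " + effective.getName());
        // List every policy the cluster knows about.
        for (BlockStoragePolicySpi p : admin.getAllStoragePolicies()) {
            System.out.println("available: " + p.getName());
        }
    }
}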

Example 4 with BlockStoragePolicySpi

Use of org.apache.hadoop.fs.BlockStoragePolicySpi in project hadoop by apache.

Class TestWebHDFS, method testStoragePolicy.

@Test
public void testStoragePolicy() throws Exception {
    MiniDFSCluster cluster = null;
    final Configuration conf = WebHdfsTestUtil.createConf();
    final Path path = new Path("/file");
    try {
        cluster = new MiniDFSCluster.Builder(conf).numDataNodes(0).build();
        final DistributedFileSystem dfs = cluster.getFileSystem();
        final WebHdfsFileSystem webHdfs = WebHdfsTestUtil.getWebHdfsFileSystem(conf, WebHdfsConstants.WEBHDFS_SCHEME);
        // test getAllStoragePolicies
        BlockStoragePolicy[] dfsPolicies = (BlockStoragePolicy[]) dfs.getAllStoragePolicies().toArray();
        BlockStoragePolicy[] webHdfsPolicies = (BlockStoragePolicy[]) webHdfs.getAllStoragePolicies().toArray();
        Assert.assertTrue(Arrays.equals(dfsPolicies, webHdfsPolicies));
        // test get/set/unset policies
        DFSTestUtil.createFile(dfs, path, 0, (short) 1, 0L);
        // get defaultPolicy
        BlockStoragePolicySpi defaultdfsPolicy = dfs.getStoragePolicy(path);
        // set policy through webhdfs
        webHdfs.setStoragePolicy(path, HdfsConstants.COLD_STORAGE_POLICY_NAME);
        // get policy from dfs
        BlockStoragePolicySpi dfsPolicy = dfs.getStoragePolicy(path);
        // get policy from webhdfs
        BlockStoragePolicySpi webHdfsPolicy = webHdfs.getStoragePolicy(path);
        Assert.assertEquals(HdfsConstants.COLD_STORAGE_POLICY_NAME.toString(), webHdfsPolicy.getName());
        Assert.assertEquals(webHdfsPolicy, dfsPolicy);
        // unset policy
        webHdfs.unsetStoragePolicy(path);
        Assert.assertEquals(defaultdfsPolicy, webHdfs.getStoragePolicy(path));
    } finally {
        if (cluster != null) {
            cluster.shutdown();
        }
    }
}
Also used : Path(org.apache.hadoop.fs.Path) MiniDFSCluster(org.apache.hadoop.hdfs.MiniDFSCluster) Configuration(org.apache.hadoop.conf.Configuration) HdfsConfiguration(org.apache.hadoop.hdfs.HdfsConfiguration) BlockStoragePolicy(org.apache.hadoop.hdfs.protocol.BlockStoragePolicy) BlockStoragePolicySpi(org.apache.hadoop.fs.BlockStoragePolicySpi) DistributedFileSystem(org.apache.hadoop.hdfs.DistributedFileSystem) Test(org.junit.Test) HttpServerFunctionalTest(org.apache.hadoop.http.HttpServerFunctionalTest)
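
The WebHdfsFileSystem in this test is obtained through WebHdfsTestUtil; outside the test harness, a client can open the same file system from a webhdfs:// URI. A minimal sketch, with a hypothetical NameNode host and HTTP port:

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;

public class WebHdfsPolicyDemo {
    public static void main(String[] args) throws Exception {
        // Hypothetical NameNode HTTP address; WebHDFS is served from the NameNode's web port.
        URI webhdfs = URI.create("webhdfs://namenode.example.com:9870/");
        FileSystem fs = FileSystem.get(webhdfs, new Configuration());
        Path file = new Path("/file");
        fs.create(file).close();
        // Same FileSystem-level calls as in the test above, but issued over HTTP.
        fs.setStoragePolicy(file, HdfsConstants.COLD_STORAGE_POLICY_NAME);
        System.out.println(fs.getStoragePolicy(file).getName());
        fs.unsetStoragePolicy(file);
        fs.close();
    }
}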

Example 5 with BlockStoragePolicySpi

Use of org.apache.hadoop.fs.BlockStoragePolicySpi in project hadoop by apache.

Class ViewFileSystem, method getAllStoragePolicies.

@Override
public Collection<? extends BlockStoragePolicySpi> getAllStoragePolicies() throws IOException {
    Collection<BlockStoragePolicySpi> allPolicies = new HashSet<>();
    for (FileSystem fs : getChildFileSystems()) {
        try {
            Collection<? extends BlockStoragePolicySpi> policies = fs.getAllStoragePolicies();
            allPolicies.addAll(policies);
        } catch (UnsupportedOperationException e) {
            // ignored
        }
    }
    return allPolicies;
}
Also used : FileSystem(org.apache.hadoop.fs.FileSystem) BlockStoragePolicySpi(org.apache.hadoop.fs.BlockStoragePolicySpi) HashSet(java.util.HashSet)
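
A caller-side counterpart, as a minimal sketch: enumerate the policies of whatever FileSystem the default configuration resolves to, and tolerate implementations that, unlike the ViewFileSystem code above, surface UnsupportedOperationException instead of returning an empty collection.

import java.util.Arrays;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockStoragePolicySpi;
import org.apache.hadoop.fs.FileSystem;

public class ListPolicies {
    public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        try {
            for (BlockStoragePolicySpi policy : fs.getAllStoragePolicies()) {
                System.out.println(policy.getName() + " -> "
                        + Arrays.toString(policy.getStorageTypes()));
            }
        } catch (UnsupportedOperationException e) {
            // File systems without storage-policy support throw
            // UnsupportedOperationException from the base FileSystem implementation.
            System.out.println("storage policies not supported by " + fs.getUri());
        }
        fs.close();
    }
}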

Aggregations

BlockStoragePolicySpi (org.apache.hadoop.fs.BlockStoragePolicySpi): 7
FileSystem (org.apache.hadoop.fs.FileSystem): 4
Path (org.apache.hadoop.fs.Path): 4
BlockStoragePolicy (org.apache.hadoop.hdfs.protocol.BlockStoragePolicy): 3
Test (org.junit.Test): 3
HashSet (java.util.HashSet): 2
IOException (java.io.IOException): 1
Configuration (org.apache.hadoop.conf.Configuration): 1
StorageType (org.apache.hadoop.fs.StorageType): 1
DistributedFileSystem (org.apache.hadoop.hdfs.DistributedFileSystem): 1
HdfsConfiguration (org.apache.hadoop.hdfs.HdfsConfiguration): 1
MiniDFSCluster (org.apache.hadoop.hdfs.MiniDFSCluster): 1
HdfsAdmin (org.apache.hadoop.hdfs.client.HdfsAdmin): 1
BlockStoragePolicySuite (org.apache.hadoop.hdfs.server.blockmanagement.BlockStoragePolicySuite): 1
HttpServerFunctionalTest (org.apache.hadoop.http.HttpServerFunctionalTest): 1
JSONArray (org.json.simple.JSONArray): 1
JSONObject (org.json.simple.JSONObject): 1