Use of org.apache.hadoop.fs.BlockStoragePolicySpi in project hadoop by apache.
In the class DistributedFileSystem, the method getStoragePolicy:
@Override
public BlockStoragePolicySpi getStoragePolicy(Path path) throws IOException {
  statistics.incrementReadOps(1);
  storageStatistics.incrementOpCounter(OpType.GET_STORAGE_POLICY);
  Path absF = fixRelativePart(path);
  return new FileSystemLinkResolver<BlockStoragePolicySpi>() {

    @Override
    public BlockStoragePolicySpi doCall(final Path p) throws IOException {
      return getClient().getStoragePolicy(getPathName(p));
    }

    @Override
    public BlockStoragePolicySpi next(final FileSystem fs, final Path p)
        throws IOException {
      return fs.getStoragePolicy(p);
    }
  }.resolve(this, absF);
}
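For orientation, a minimal sketch of how client code would exercise this method through the generic FileSystem API; the configuration and the /data/reports path are assumptions for illustration, and symlinked paths are handled by the resolver shown above.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockStoragePolicySpi;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class GetStoragePolicyExample {
  public static void main(String[] args) throws Exception {
    // Assumes fs.defaultFS points at an HDFS cluster and the path already exists.
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    BlockStoragePolicySpi policy = fs.getStoragePolicy(new Path("/data/reports"));
    System.out.println("Effective storage policy: " + policy.getName());
  }
}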
Use of org.apache.hadoop.fs.BlockStoragePolicySpi in project hadoop by apache.
In the class BaseTestHttpFSWith, the method testStoragePolicy:
private void testStoragePolicy() throws Exception {
  Assume.assumeFalse("Assume it's not a local FS", isLocalFS());
  FileSystem fs = FileSystem.get(getProxiedFSConf());
  fs.mkdirs(getProxiedFSTestDir());
  Path path = new Path(getProxiedFSTestDir(), "policy.txt");
  FileSystem httpfs = getHttpFSFileSystem();
  // test getAllStoragePolicies
  BlockStoragePolicy[] dfsPolicies =
      (BlockStoragePolicy[]) fs.getAllStoragePolicies().toArray();
  BlockStoragePolicy[] httpPolicies =
      (BlockStoragePolicy[]) httpfs.getAllStoragePolicies().toArray();
  Assert.assertArrayEquals(
      "Policy array returned from the DFS and HttpFS should be equal",
      dfsPolicies, httpPolicies);
  // test get/set/unset policies
  DFSTestUtil.createFile(fs, path, 0, (short) 1, 0L);
  // get the default policy
  BlockStoragePolicySpi defaultdfsPolicy = fs.getStoragePolicy(path);
  // set the policy through HttpFS
  httpfs.setStoragePolicy(path, HdfsConstants.COLD_STORAGE_POLICY_NAME);
  // get the policy from DFS
  BlockStoragePolicySpi dfsPolicy = fs.getStoragePolicy(path);
  // get the policy from HttpFS
  BlockStoragePolicySpi httpFsPolicy = httpfs.getStoragePolicy(path);
  Assert.assertEquals(
      "Storage policy returned from the get API should be the same as the set policy",
      HdfsConstants.COLD_STORAGE_POLICY_NAME.toString(), httpFsPolicy.getName());
  Assert.assertEquals(
      "Storage policy returned from the DFS and HttpFS should be equal",
      httpFsPolicy, dfsPolicy);
  // unset the policy
  httpfs.unsetStoragePolicy(path);
  Assert.assertEquals(
      "After unsetting the storage policy, the get API should return the default policy",
      defaultdfsPolicy, httpfs.getStoragePolicy(path));
  fs.close();
}
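The same set/get/unset round trip, stripped of the test harness, might look as follows; this is a sketch, assuming an HDFS default file system and a pre-existing file at a hypothetical path.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockStoragePolicySpi;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class StoragePolicyRoundTrip {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();        // assumes fs.defaultFS is an HDFS cluster
    FileSystem fs = FileSystem.get(conf);
    Path file = new Path("/tmp/policy-demo.txt");    // hypothetical path, assumed to exist

    BlockStoragePolicySpi before = fs.getStoragePolicy(file);
    fs.setStoragePolicy(file, "COLD");               // same policy name used in the test above
    BlockStoragePolicySpi after = fs.getStoragePolicy(file);
    System.out.println(before.getName() + " -> " + after.getName());

    fs.unsetStoragePolicy(file);                     // reverts to the inherited/default policy
    fs.close();
  }
}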
Use of org.apache.hadoop.fs.BlockStoragePolicySpi in project hadoop by apache.
In the class TestHdfsAdmin, the method testHdfsAdminStoragePolicies:
/**
 * Test that we can set, get, unset storage policies via {@link HdfsAdmin}.
 */
@Test
public void testHdfsAdminStoragePolicies() throws Exception {
  HdfsAdmin hdfsAdmin = new HdfsAdmin(FileSystem.getDefaultUri(conf), conf);
  FileSystem fs = FileSystem.get(conf);
  final Path foo = new Path("/foo");
  final Path bar = new Path(foo, "bar");
  final Path wow = new Path(bar, "wow");
  DFSTestUtil.createFile(fs, wow, SIZE, REPL, 0);
  final BlockStoragePolicySuite suite = BlockStoragePolicySuite.createDefaultSuite();
  final BlockStoragePolicy warm = suite.getPolicy("WARM");
  final BlockStoragePolicy cold = suite.getPolicy("COLD");
  final BlockStoragePolicy hot = suite.getPolicy("HOT");
  /*
   * test: set storage policy
   */
  hdfsAdmin.setStoragePolicy(foo, warm.getName());
  hdfsAdmin.setStoragePolicy(bar, cold.getName());
  hdfsAdmin.setStoragePolicy(wow, hot.getName());
  /*
   * test: get storage policy after set
   */
  assertEquals(hdfsAdmin.getStoragePolicy(foo), warm);
  assertEquals(hdfsAdmin.getStoragePolicy(bar), cold);
  assertEquals(hdfsAdmin.getStoragePolicy(wow), hot);
  /*
   * test: unset storage policy
   */
  hdfsAdmin.unsetStoragePolicy(foo);
  hdfsAdmin.unsetStoragePolicy(bar);
  hdfsAdmin.unsetStoragePolicy(wow);
  /*
   * test: get storage policy after unset. HOT by default.
   */
  assertEquals(hdfsAdmin.getStoragePolicy(foo), hot);
  assertEquals(hdfsAdmin.getStoragePolicy(bar), hot);
  assertEquals(hdfsAdmin.getStoragePolicy(wow), hot);
  /*
   * test: get all storage policies
   */
  // Get policies via HdfsAdmin
  Set<String> policyNamesSet1 = new HashSet<>();
  for (BlockStoragePolicySpi policy : hdfsAdmin.getAllStoragePolicies()) {
    policyNamesSet1.add(policy.getName());
  }
  // Get policies via BlockStoragePolicySuite
  Set<String> policyNamesSet2 = new HashSet<>();
  for (BlockStoragePolicy policy : suite.getAllPolicies()) {
    policyNamesSet2.add(policy.getName());
  }
  // Ensure that we got the same set of policies in both cases.
  Assert.assertTrue(Sets.difference(policyNamesSet1, policyNamesSet2).isEmpty());
  Assert.assertTrue(Sets.difference(policyNamesSet2, policyNamesSet1).isEmpty());
}
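Beyond comparing names, a policy obtained through HdfsAdmin can be inspected via the BlockStoragePolicySpi interface; a minimal self-contained sketch, reusing the /foo path from the test above, with the configuration assumed to point at an HDFS cluster.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockStoragePolicySpi;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.StorageType;
import org.apache.hadoop.hdfs.client.HdfsAdmin;

public class InspectStoragePolicy {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();    // assumes fs.defaultFS is an HDFS cluster
    HdfsAdmin hdfsAdmin = new HdfsAdmin(FileSystem.getDefaultUri(conf), conf);
    BlockStoragePolicySpi policy = hdfsAdmin.getStoragePolicy(new Path("/foo"));
    System.out.println("name: " + policy.getName());
    for (StorageType type : policy.getStorageTypes()) {
      System.out.println("  blocks placed on: " + type);
    }
    for (StorageType fallback : policy.getCreationFallbacks()) {
      System.out.println("  creation fallback: " + fallback);
    }
  }
}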
Use of org.apache.hadoop.fs.BlockStoragePolicySpi in project hadoop by apache.
In the class TestWebHDFS, the method testStoragePolicy:
@Test
public void testStoragePolicy() throws Exception {
  MiniDFSCluster cluster = null;
  final Configuration conf = WebHdfsTestUtil.createConf();
  final Path path = new Path("/file");
  try {
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(0).build();
    final DistributedFileSystem dfs = cluster.getFileSystem();
    final WebHdfsFileSystem webHdfs =
        WebHdfsTestUtil.getWebHdfsFileSystem(conf, WebHdfsConstants.WEBHDFS_SCHEME);
    // test getAllStoragePolicies
    BlockStoragePolicy[] dfsPolicies =
        (BlockStoragePolicy[]) dfs.getAllStoragePolicies().toArray();
    BlockStoragePolicy[] webHdfsPolicies =
        (BlockStoragePolicy[]) webHdfs.getAllStoragePolicies().toArray();
    Assert.assertTrue(Arrays.equals(dfsPolicies, webHdfsPolicies));
    // test get/set/unset policies
    DFSTestUtil.createFile(dfs, path, 0, (short) 1, 0L);
    // get defaultPolicy
    BlockStoragePolicySpi defaultdfsPolicy = dfs.getStoragePolicy(path);
    // set policy through webhdfs
    webHdfs.setStoragePolicy(path, HdfsConstants.COLD_STORAGE_POLICY_NAME);
    // get policy from dfs
    BlockStoragePolicySpi dfsPolicy = dfs.getStoragePolicy(path);
    // get policy from webhdfs
    BlockStoragePolicySpi webHdfsPolicy = webHdfs.getStoragePolicy(path);
    Assert.assertEquals(HdfsConstants.COLD_STORAGE_POLICY_NAME.toString(),
        webHdfsPolicy.getName());
    Assert.assertEquals(webHdfsPolicy, dfsPolicy);
    // unset policy
    webHdfs.unsetStoragePolicy(path);
    Assert.assertEquals(defaultdfsPolicy, webHdfs.getStoragePolicy(path));
  } finally {
    if (cluster != null) {
      cluster.shutdown();
    }
  }
}
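Outside a MiniDFSCluster, the same WebHDFS client can be obtained through the standard FileSystem factory; a minimal sketch in which the NameNode HTTP address and the path are placeholders.

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockStoragePolicySpi;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class WebHdfsPolicyExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // "namenode-host:9870" is a placeholder for the NameNode's HTTP address.
    FileSystem webHdfs = FileSystem.get(URI.create("webhdfs://namenode-host:9870"), conf);
    Path path = new Path("/file");                 // assumed to exist, as in the test above
    webHdfs.setStoragePolicy(path, "COLD");
    BlockStoragePolicySpi policy = webHdfs.getStoragePolicy(path);
    System.out.println(policy.getName());
    webHdfs.unsetStoragePolicy(path);
  }
}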
Use of org.apache.hadoop.fs.BlockStoragePolicySpi in project hadoop by apache.
In the class ViewFileSystem, the method getAllStoragePolicies:
@Override
public Collection<? extends BlockStoragePolicySpi> getAllStoragePolicies()
    throws IOException {
  Collection<BlockStoragePolicySpi> allPolicies = new HashSet<>();
  for (FileSystem fs : getChildFileSystems()) {
    try {
      Collection<? extends BlockStoragePolicySpi> policies = fs.getAllStoragePolicies();
      allPolicies.addAll(policies);
    } catch (UnsupportedOperationException e) {
      // A child file system that does not support storage policies is skipped.
    }
  }
  return allPolicies;
}
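A minimal sketch of consuming this union from a viewfs client; it assumes fs.defaultFS is configured as a viewfs mount table and simply prints the distinct policy names collected across the child file systems.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockStoragePolicySpi;
import org.apache.hadoop.fs.FileSystem;

public class ListViewFsPolicies {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();      // assumes fs.defaultFS = viewfs://<cluster>/
    FileSystem viewFs = FileSystem.get(conf);
    for (BlockStoragePolicySpi policy : viewFs.getAllStoragePolicies()) {
      // The HashSet in the override above de-duplicates equal policies across mounts.
      System.out.println(policy.getName());
    }
  }
}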