Use of org.apache.hadoop.hdfs.protocol.BlockStoragePolicy in project hadoop by apache.
The class TestStoragePolicyCommands, method testSetAndGetStoragePolicy.
@Test
public void testSetAndGetStoragePolicy() throws Exception {
  final Path foo = new Path("/foo");
  final Path bar = new Path(foo, "bar");
  DFSTestUtil.createFile(fs, bar, SIZE, REPL, 0);
  final StoragePolicyAdmin admin = new StoragePolicyAdmin(conf);
  DFSTestUtil.toolRun(admin, "-getStoragePolicy -path /foo", 0,
      "The storage policy of " + foo.toString() + " is unspecified");
  DFSTestUtil.toolRun(admin, "-getStoragePolicy -path /foo/bar", 0,
      "The storage policy of " + bar.toString() + " is unspecified");
  DFSTestUtil.toolRun(admin, "-setStoragePolicy -path /foo -policy WARM", 0,
      "Set storage policy WARM on " + foo.toString());
  DFSTestUtil.toolRun(admin, "-setStoragePolicy -path /foo/bar -policy COLD", 0,
      "Set storage policy COLD on " + bar.toString());
  DFSTestUtil.toolRun(admin, "-setStoragePolicy -path /fooz -policy WARM", 2,
      "File/Directory does not exist: /fooz");
  final BlockStoragePolicySuite suite = BlockStoragePolicySuite.createDefaultSuite();
  final BlockStoragePolicy warm = suite.getPolicy("WARM");
  final BlockStoragePolicy cold = suite.getPolicy("COLD");
  DFSTestUtil.toolRun(admin, "-getStoragePolicy -path /foo", 0,
      "The storage policy of " + foo.toString() + ":\n" + warm);
  DFSTestUtil.toolRun(admin, "-getStoragePolicy -path /foo/bar", 0,
      "The storage policy of " + bar.toString() + ":\n" + cold);
  DFSTestUtil.toolRun(admin, "-getStoragePolicy -path /fooz", 2,
      "File/Directory does not exist: /fooz");
}
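The checks the test drives through the StoragePolicyAdmin CLI have a direct counterpart on the DistributedFileSystem API. The following is a minimal, illustrative sketch (not part of the original test): it assumes a local MiniDFSCluster and that getStoragePolicy(Path) is available on the client library, which is the case on recent Hadoop releases.
// Illustrative sketch only; assumes a local MiniDFSCluster and a recent
// Hadoop client that provides DistributedFileSystem#getStoragePolicy(Path).
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy;

public class StoragePolicyApiSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
    try {
      DistributedFileSystem dfs = cluster.getFileSystem();
      Path foo = new Path("/foo");
      dfs.mkdirs(foo);
      // Programmatic equivalent of "-setStoragePolicy -path /foo -policy WARM".
      dfs.setStoragePolicy(foo, "WARM");
      // Programmatic equivalent of "-getStoragePolicy -path /foo".
      BlockStoragePolicy effective = dfs.getStoragePolicy(foo);
      System.out.println("Policy on " + foo + ": " + effective.getName());
    } finally {
      cluster.shutdown();
    }
  }
}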
Use of org.apache.hadoop.hdfs.protocol.BlockStoragePolicy in project hadoop by apache.
The class TestStoragePolicyCommands, method testSetAndUnsetStoragePolicy.
@Test
public void testSetAndUnsetStoragePolicy() throws Exception {
  final Path foo = new Path("/foo");
  final Path bar = new Path(foo, "bar");
  final Path wow = new Path(bar, "wow");
  DFSTestUtil.createFile(fs, wow, SIZE, REPL, 0);
  /*
   * test: set storage policy
   */
  final StoragePolicyAdmin admin = new StoragePolicyAdmin(conf);
  DFSTestUtil.toolRun(admin, "-setStoragePolicy -path " + fs.getUri() + "/foo -policy WARM", 0,
      "Set storage policy WARM on " + fs.getUri() + "/foo");
  DFSTestUtil.toolRun(admin, "-setStoragePolicy -path /foo/bar -policy COLD", 0,
      "Set storage policy COLD on " + bar.toString());
  DFSTestUtil.toolRun(admin, "-setStoragePolicy -path /foo/bar/wow -policy HOT", 0,
      "Set storage policy HOT on " + wow.toString());
  DFSTestUtil.toolRun(admin, "-setStoragePolicy -path /fooz -policy WARM", 2,
      "File/Directory does not exist: /fooz");
  /*
   * test: get storage policy after set
   */
  final BlockStoragePolicySuite suite = BlockStoragePolicySuite.createDefaultSuite();
  final BlockStoragePolicy warm = suite.getPolicy("WARM");
  final BlockStoragePolicy cold = suite.getPolicy("COLD");
  final BlockStoragePolicy hot = suite.getPolicy("HOT");
  DFSTestUtil.toolRun(admin, "-getStoragePolicy -path " + fs.getUri() + "/foo", 0,
      "The storage policy of " + fs.getUri() + "/foo:\n" + warm);
  DFSTestUtil.toolRun(admin, "-getStoragePolicy -path /foo/bar", 0,
      "The storage policy of " + bar.toString() + ":\n" + cold);
  DFSTestUtil.toolRun(admin, "-getStoragePolicy -path /foo/bar/wow", 0,
      "The storage policy of " + wow.toString() + ":\n" + hot);
  DFSTestUtil.toolRun(admin, "-getStoragePolicy -path /fooz", 2,
      "File/Directory does not exist: /fooz");
  /*
   * test: unset storage policy
   */
  DFSTestUtil.toolRun(admin, "-unsetStoragePolicy -path " + fs.getUri() + "/foo", 0,
      "Unset storage policy from " + fs.getUri() + "/foo");
  DFSTestUtil.toolRun(admin, "-unsetStoragePolicy -path /foo/bar", 0,
      "Unset storage policy from " + bar.toString());
  DFSTestUtil.toolRun(admin, "-unsetStoragePolicy -path /foo/bar/wow", 0,
      "Unset storage policy from " + wow.toString());
  DFSTestUtil.toolRun(admin, "-unsetStoragePolicy -path /fooz", 2,
      "File/Directory does not exist: /fooz");
  /*
   * test: get storage policy after unset
   */
  DFSTestUtil.toolRun(admin, "-getStoragePolicy -path /foo", 0,
      "The storage policy of " + foo.toString() + " is unspecified");
  DFSTestUtil.toolRun(admin, "-getStoragePolicy -path /foo/bar", 0,
      "The storage policy of " + bar.toString() + " is unspecified");
  DFSTestUtil.toolRun(admin, "-getStoragePolicy -path /foo/bar/wow", 0,
      "The storage policy of " + wow.toString() + " is unspecified");
  DFSTestUtil.toolRun(admin, "-getStoragePolicy -path /fooz", 2,
      "File/Directory does not exist: /fooz");
}
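Newer Hadoop releases also expose the unset operation on DistributedFileSystem itself, so the CLI round trip above has an API counterpart. The fragment below is a hedged sketch reusing the hypothetical dfs handle and /foo directory from the sketch after the previous test; unsetStoragePolicy(Path) may not exist on older client libraries.
// Fragment only: dfs and /foo come from the earlier MiniDFSCluster sketch.
// unsetStoragePolicy(Path) is assumed to be available (newer Hadoop releases).
Path foo = new Path("/foo");
dfs.setStoragePolicy(foo, "WARM");   // API counterpart of -setStoragePolicy
dfs.unsetStoragePolicy(foo);         // API counterpart of -unsetStoragePolicy
// After unsetting, the path falls back to the inherited or default policy.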
Use of org.apache.hadoop.hdfs.protocol.BlockStoragePolicy in project hbase by apache.
The class TestHFileOutputFormat2, method getStoragePolicyNameForOldHDFSVersion.
private String getStoragePolicyNameForOldHDFSVersion(FileSystem fs, Path path) {
  try {
    if (fs instanceof DistributedFileSystem) {
      DistributedFileSystem dfs = (DistributedFileSystem) fs;
      HdfsFileStatus status = dfs.getClient().getFileInfo(path.toUri().getPath());
      if (null != status) {
        byte storagePolicyId = status.getStoragePolicy();
        Field idUnspecified = BlockStoragePolicySuite.class.getField("ID_UNSPECIFIED");
        if (storagePolicyId != idUnspecified.getByte(BlockStoragePolicySuite.class)) {
          BlockStoragePolicy[] policies = dfs.getStoragePolicies();
          for (BlockStoragePolicy policy : policies) {
            if (policy.getId() == storagePolicyId) {
              return policy.getName();
            }
          }
        }
      }
    }
  } catch (Throwable e) {
    LOG.warn("failed to get block storage policy of [" + path + "]", e);
  }
  return null;
}
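The reflection against the ID_UNSPECIFIED field is only needed for old HDFS client libraries. On clients that ship FileSystem#getStoragePolicy(Path) (added in later Hadoop 2.x releases), the same lookup can be written without reflection. The method below is a minimal sketch under that assumption, not the HBase code itself; note that it may resolve the inherited effective policy rather than returning null for an unspecified path, and it assumes a LOG field like the one in the surrounding class.
// Sketch only: relies on FileSystem#getStoragePolicy(Path), available on
// newer Hadoop clients (org.apache.hadoop.fs.BlockStoragePolicySpi).
private String getStoragePolicyNameForNewHDFSVersion(FileSystem fs, Path path) {
  try {
    BlockStoragePolicySpi policy = fs.getStoragePolicy(path);
    return policy == null ? null : policy.getName();
  } catch (Exception e) {
    LOG.warn("failed to get block storage policy of [" + path + "]", e);
    return null;
  }
}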
Use of org.apache.hadoop.hdfs.protocol.BlockStoragePolicy in project SSM by Intel-bigdata.
The class MovePlanMaker, method processFile.
/**
 * Compute and schedule the block moves needed for the given file to
 * satisfy the destination storage policy.
 */
private void processFile(String fullPath, HdfsLocatedFileStatus status,
    String destPolicy) throws IOException {
  final BlockStoragePolicy policy = mapStoragePolicies.get(destPolicy);
  if (policy == null) {
    LOG.warn("Failed to get the storage policy of file " + fullPath);
    return;
  }
  List<String> types = CompatibilityHelperLoader.getHelper()
      .chooseStorageTypes(policy, status.getReplication());
  final LocatedBlocks locatedBlocks =
      CompatibilityHelperLoader.getHelper().getLocatedBlocks(status);
  final boolean lastBlkComplete = locatedBlocks.isLastBlockComplete();
  schedulePlan.setBeingWritten(!lastBlkComplete);
  List<LocatedBlock> lbs = locatedBlocks.getLocatedBlocks();
  for (int i = 0; i < lbs.size(); i++) {
    if (i == lbs.size() - 1 && !lastBlkComplete) {
      // last block is incomplete, skip it
      continue;
    }
    LocatedBlock lb = lbs.get(i);
    List<String> typesForEcBlock = CompatibilityHelperLoader.getHelper()
        .getStorageTypeForEcBlock(lb, policy, status.getStoragePolicy());
    if (typesForEcBlock != null) {
      types = typesForEcBlock;
    }
    final StorageTypeDiff diff = new StorageTypeDiff(types,
        CompatibilityHelperLoader.getHelper().getStorageTypes(lb));
    int remainingReplications = diff.removeOverlap(true);
    long toMove = lb.getBlockSize() * remainingReplications;
    schedulePlan.addSizeToMove(toMove);
    schedulePlan.incBlocksToMove();
    schedulePlan.addFileLengthToMove(lb.getBlockSize());
    statistics.increaseTotalSize(toMove);
    statistics.increaseTotalBlocks(remainingReplications);
    if (remainingReplications != 0) {
      scheduleMoveBlock(diff, lb, status);
    }
  }
}
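The pivotal step above is diff.removeOverlap(true): judging from how its return value is used, it cancels out storage types that some replica of the block already satisfies and reports how many expected types remain, i.e. how many replicas still have to move. The stand-alone example below illustrates that counting idea only; it is not the SSM StorageTypeDiff implementation.
// Illustration of the overlap-removal counting idea, not SSM's StorageTypeDiff.
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;

public class OverlapExample {
  static int remainingMoves(List<String> expected, List<String> existing) {
    List<String> want = new ArrayList<>(expected);
    List<String> have = new ArrayList<>(existing);
    for (Iterator<String> it = want.iterator(); it.hasNext(); ) {
      String type = it.next();
      if (have.remove(type)) { // one existing replica already sits on the right type
        it.remove();
      }
    }
    return want.size();        // replicas that still need to be migrated
  }

  public static void main(String[] args) {
    // A COLD-like policy wants 3 x ARCHIVE; the block currently has 2 x DISK, 1 x ARCHIVE.
    List<String> expected = Arrays.asList("ARCHIVE", "ARCHIVE", "ARCHIVE");
    List<String> existing = Arrays.asList("DISK", "DISK", "ARCHIVE");
    System.out.println(remainingMoves(expected, existing)); // prints 2
  }
}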
Use of org.apache.hadoop.hdfs.protocol.BlockStoragePolicy in project SSM by Intel-bigdata.
The class MovePlanMaker, method initStoragePolicies.
private void initStoragePolicies() throws IOException {
  BlockStoragePolicy[] policies = dfs.getStoragePolicies();
  for (BlockStoragePolicy policy : policies) {
    mapStoragePolicies.put(policy.getName(), policy);
    mapPolicyIdToName.put(policy.getId(), policy.getName());
  }
}
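Here the name and id maps are populated from the live NameNode via dfs.getStoragePolicies(). The same id/name pairs can also be inspected offline from the default suite, as the earlier test does; a small illustrative sketch, assuming the hadoop-hdfs server classes (which contain BlockStoragePolicySuite) are on the classpath:
// Prints the built-in policy ids and names from the default suite,
// without contacting a NameNode; illustrative only.
import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockStoragePolicySuite;

public class ListDefaultPolicies {
  public static void main(String[] args) {
    BlockStoragePolicySuite suite = BlockStoragePolicySuite.createDefaultSuite();
    for (BlockStoragePolicy policy : suite.getAllPolicies()) {
      System.out.println(policy.getId() + " -> " + policy.getName());
    }
  }
}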