Use of org.apache.hadoop.fs.FileChecksum in project hadoop by apache.
From the class HttpFSFileSystem, method getFileChecksum. The method issues a GETFILECHECKSUM operation against the HttpFS REST gateway, parses the JSON response, and wraps it in an anonymous in-memory FileChecksum.
@Override
public FileChecksum getFileChecksum(Path f) throws IOException {
  Map<String, String> params = new HashMap<String, String>();
  params.put(OP_PARAM, Operation.GETFILECHECKSUM.toString());
  HttpURLConnection conn = getConnection(Operation.GETFILECHECKSUM.getMethod(), params, f, true);
  HttpExceptionUtils.validateResponse(conn, HttpURLConnection.HTTP_OK);
  final JSONObject json = (JSONObject) ((JSONObject) HttpFSUtils.jsonParse(conn)).get(FILE_CHECKSUM_JSON);
  return new FileChecksum() {
    @Override
    public String getAlgorithmName() {
      return (String) json.get(CHECKSUM_ALGORITHM_JSON);
    }

    @Override
    public int getLength() {
      return ((Long) json.get(CHECKSUM_LENGTH_JSON)).intValue();
    }

    @Override
    public byte[] getBytes() {
      return StringUtils.hexStringToByte((String) json.get(CHECKSUM_BYTES_JSON));
    }

    @Override
    public void write(DataOutput out) throws IOException {
      throw new UnsupportedOperationException();
    }

    @Override
    public void readFields(DataInput in) throws IOException {
      throw new UnsupportedOperationException();
    }
  };
}
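For context, a minimal client-side sketch of consuming such a checksum through an HttpFS gateway. The gateway URI, port, class name, and file path are illustrative assumptions, not values from the source; it assumes HttpFS is reachable via the webhdfs:// scheme as in a stock deployment.

import java.net.URI;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileChecksum;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.util.StringUtils;

public class ChecksumClientSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Hypothetical HttpFS endpoint; 14000 is the default HttpFS port.
    FileSystem fs = FileSystem.get(URI.create("webhdfs://httpfs-host:14000"), conf);
    // Hypothetical path; getFileChecksum may return null on file systems
    // that do not support checksums.
    FileChecksum checksum = fs.getFileChecksum(new Path("/user/alice/data.bin"));
    if (checksum != null) {
      System.out.println("algorithm: " + checksum.getAlgorithmName());
      System.out.println("length:    " + checksum.getLength());
      System.out.println("bytes:     " + StringUtils.byteToHexString(checksum.getBytes()));
    }
    fs.close();
  }
}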
Use of org.apache.hadoop.fs.FileChecksum in project hadoop by apache.
From the class TestFileChecksum, method testStripedFileChecksumWithMissedDataBlocks1. The test verifies that the checksum of an erasure-coded (striped) file computed while a DataNode is down, which forces block reconstruction, matches the checksum computed with all nodes healthy.
@Test(timeout = 90000)
public void testStripedFileChecksumWithMissedDataBlocks1() throws Exception {
  prepareTestFiles(fileSize, new String[] { stripedFile1 });
  FileChecksum stripedFileChecksum1 = getFileChecksum(stripedFile1, fileSize, false);
  FileChecksum stripedFileChecksumRecon = getFileChecksum(stripedFile1, fileSize, true);
  LOG.info("stripedFileChecksum1:" + stripedFileChecksum1);
  LOG.info("stripedFileChecksumRecon:" + stripedFileChecksumRecon);
  Assert.assertTrue("Checksum mismatches!", stripedFileChecksum1.equals(stripedFileChecksumRecon));
}
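A side note on the assertion: FileChecksum defines equals over the algorithm name and the checksum bytes, so this compares content, not object identity. An equivalent form that reports both values when they differ (a sketch, not the project's code):

  // assertEquals prints the expected and actual checksums on failure,
  // which is more informative than assertTrue's fixed message.
  Assert.assertEquals("Checksum mismatches!", stripedFileChecksum1, stripedFileChecksumRecon);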
Use of org.apache.hadoop.fs.FileChecksum in project hadoop by apache.
From the class TestFileChecksum, method testStripedFileChecksumWithMissedDataBlocksRangeQuery. Same comparison as above, but only the first requestedLen bytes of the file are checksummed.
private void testStripedFileChecksumWithMissedDataBlocksRangeQuery(String stripedFile, int requestedLen) throws Exception {
  LOG.info("Checksum file:{}, requested length:{}", stripedFile, requestedLen);
  prepareTestFiles(fileSize, new String[] { stripedFile });
  FileChecksum stripedFileChecksum1 = getFileChecksum(stripedFile, requestedLen, false);
  FileChecksum stripedFileChecksumRecon = getFileChecksum(stripedFile, requestedLen, true);
  LOG.info("stripedFileChecksum1:" + stripedFileChecksum1);
  LOG.info("stripedFileChecksumRecon:" + stripedFileChecksumRecon);
  Assert.assertTrue("Checksum mismatches!", stripedFileChecksum1.equals(stripedFileChecksumRecon));
}
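The callers of this helper are not shown in this excerpt; a hedged sketch of how a driver inside the same test class might probe several prefix lengths (the specific lengths are made-up examples, not the actual test parameters):

  // Illustrative probe lengths near the start, middle, and end of the file.
  int[] probeLengths = { 1, fileSize / 2, fileSize - 1, fileSize };
  for (int len : probeLengths) {
    testStripedFileChecksumWithMissedDataBlocksRangeQuery(stripedFile1, len);
  }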
Use of org.apache.hadoop.fs.FileChecksum in project hadoop by apache.
From the class TestFileChecksum, method getFileChecksum. A test helper that optionally kills a DataNode holding part of the file before requesting the checksum, then restarts it afterwards.
private FileChecksum getFileChecksum(String filePath, int range, boolean killDn) throws Exception {
  int dnIdxToDie = -1;
  if (killDn) {
    // Shut down a DataNode that stores part of the file, forcing the
    // checksum computation to reconstruct the missing block data.
    dnIdxToDie = getDataNodeToKill(filePath);
    DataNode dnToDie = cluster.getDataNodes().get(dnIdxToDie);
    shutdownDataNode(dnToDie);
  }
  Path testPath = new Path(filePath);
  FileChecksum fc;
  if (range >= 0) {
    // A non-negative range requests the checksum of only the first `range` bytes.
    fc = fs.getFileChecksum(testPath, range);
  } else {
    fc = fs.getFileChecksum(testPath);
  }
  if (dnIdxToDie != -1) {
    // Bring the DataNode back so later tests see a healthy cluster.
    cluster.restartDataNode(dnIdxToDie);
  }
  return fc;
}
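Usage follows from the signature: killDn = true exercises the reconstruction path, and a negative range requests the whole file. A sketch with an assumed file path:

  // Compute the checksum twice: once on a healthy cluster, once with a
  // DataNode holding part of the file shut down, then compare.
  FileChecksum normal = getFileChecksum("/striped/testFile", -1, false);
  FileChecksum recon = getFileChecksum("/striped/testFile", -1, true);
  Assert.assertEquals("Checksum mismatches!", normal, recon);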
Use of org.apache.hadoop.fs.FileChecksum in project hadoop by apache.
From the class TestGetFileChecksum, method testGetFileChecksum. The test appends to a file repeatedly, snapshotting the full-file checksum after each round, then checks that a ranged checksum over each prefix of the grown file still matches the snapshot taken when the file was exactly that long.
public void testGetFileChecksum(final Path foo, final int appendLength) throws Exception {
  final int appendRounds = 16;
  FileChecksum[] fc = new FileChecksum[appendRounds + 1];
  DFSTestUtil.createFile(dfs, foo, appendLength, REPLICATION, 0L);
  fc[0] = dfs.getFileChecksum(foo);
  for (int i = 0; i < appendRounds; i++) {
    DFSTestUtil.appendFile(dfs, foo, appendLength);
    fc[i + 1] = dfs.getFileChecksum(foo);
  }
  for (int i = 0; i < appendRounds + 1; i++) {
    FileChecksum checksum = dfs.getFileChecksum(foo, appendLength * (i + 1));
    Assert.assertTrue(checksum.equals(fc[i]));
  }
}
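In other words, the second loop checks a prefix invariant: the ranged checksum of the first (i + 1) * appendLength bytes must equal the snapshot fc[i] recorded when the file had exactly that length. A hedged invocation sketch; the path and append length are assumptions:

  // 16 append rounds of 1 KB each on a hypothetical test path.
  testGetFileChecksum(new Path("/test/appendChecksum"), 1024);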