Search in sources:

Example 16 with FileChecksum

Use of org.apache.hadoop.fs.FileChecksum in the project cdap by caskdata.

From the class ReplicationStatusTool, method getClusterChecksumMap.

/**
 * Builds a sorted map from normalized snapshot file name to its checksum string,
 * covering every file returned by {@code addAllFiles} on the cluster filesystem.
 *
 * @return sorted map of normalized file name to checksum string
 * @throws IOException if the filesystem cannot be reached or a checksum read fails
 */
private static SortedMap<String, String> getClusterChecksumMap() throws IOException {
    FileSystem fileSystem = FileSystem.get(hConf);
    List<String> fileList = addAllFiles(fileSystem);
    SortedMap<String, String> checksumMap = new TreeMap<>();
    for (String file : fileList) {
        FileChecksum fileChecksum = fileSystem.getFileChecksum(new Path(file));
        // FileSystem#getFileChecksum is documented to return null when the
        // underlying filesystem cannot provide a checksum; skip such files
        // instead of throwing an NPE on toString().
        if (fileChecksum == null) {
            LOG.warn("No checksum available for file " + file + "; skipping.");
            continue;
        }
        checksumMap.put(normalizedFileName(file), fileChecksum.toString());
    }
    LOG.info("Added " + checksumMap.size() + " checksums for snapshot files.");
    return checksumMap;
}
Also used : Path(org.apache.hadoop.fs.Path) FileSystem(org.apache.hadoop.fs.FileSystem) TreeMap(java.util.TreeMap) FileChecksum(org.apache.hadoop.fs.FileChecksum)

Example 17 with FileChecksum

Use of org.apache.hadoop.fs.FileChecksum in the project hadoop by apache.

From the class BaseTestHttpFSWith, method testChecksum.

/**
 * Verifies that the checksum reported through the HttpFS gateway matches the
 * checksum computed directly against the proxied HDFS for the same file.
 * Skipped on the local filesystem, which does not provide file checksums.
 *
 * @throws Exception on any filesystem or assertion failure
 */
private void testChecksum() throws Exception {
    if (!isLocalFS()) {
        Path path;
        FileChecksum hdfsChecksum;
        // try-with-resources so the FileSystem and OutputStream are closed
        // even if a call or assertion throws (the original leaked on failure).
        try (FileSystem fs = FileSystem.get(getProxiedFSConf())) {
            fs.mkdirs(getProxiedFSTestDir());
            path = new Path(getProxiedFSTestDir(), "foo.txt");
            try (OutputStream os = fs.create(path)) {
                // Single byte is enough to produce a non-trivial checksum.
                os.write(1);
            }
            hdfsChecksum = fs.getFileChecksum(path);
        }
        FileChecksum httpChecksum;
        try (FileSystem httpFs = getHttpFSFileSystem()) {
            httpChecksum = httpFs.getFileChecksum(path);
        }
        // The HTTP view must agree with direct HDFS on algorithm, length, and bytes.
        assertEquals(httpChecksum.getAlgorithmName(), hdfsChecksum.getAlgorithmName());
        assertEquals(httpChecksum.getLength(), hdfsChecksum.getLength());
        assertArrayEquals(httpChecksum.getBytes(), hdfsChecksum.getBytes());
    }
}
Also used : Path(org.apache.hadoop.fs.Path) FileSystem(org.apache.hadoop.fs.FileSystem) OutputStream(java.io.OutputStream) FileOutputStream(java.io.FileOutputStream) FileChecksum(org.apache.hadoop.fs.FileChecksum)

Example 18 with FileChecksum

Use of org.apache.hadoop.fs.FileChecksum in the project hadoop by apache.

From the class TestViewFileSystemHdfs, method testFileChecksum.

/**
 * Verifies that checksums obtained through a ViewFileSystem mount point match
 * checksums obtained directly from the target filesystem, both for the whole
 * file and for a half-length prefix.
 *
 * @throws IOException on any filesystem failure
 */
@Test
public void testFileChecksum() throws IOException {
    ViewFileSystem viewFs = (ViewFileSystem) fsView;
    Path mountDataRootPath = new Path("/data");
    String fsTargetFileName = "debug.log";
    Path fsTargetFilePath = new Path(targetTestRoot, "data/debug.log");
    Path mountDataFilePath = new Path(mountDataRootPath, fsTargetFileName);
    fileSystemTestHelper.createFile(fsTarget, fsTargetFilePath);
    FileStatus fileStatus = viewFs.getFileStatus(mountDataFilePath);
    long fileLength = fileStatus.getLen();
    FileChecksum fileChecksumViaViewFs = viewFs.getFileChecksum(mountDataFilePath);
    FileChecksum fileChecksumViaTargetFs = fsTarget.getFileChecksum(fsTargetFilePath);
    // assertEquals instead of assertTrue(a.equals(b)): null-safe (the original
    // would NPE if the view-fs checksum were null) and prints both values on failure.
    Assert.assertEquals("File checksum not matching!", fileChecksumViaTargetFs, fileChecksumViaViewFs);
    // Repeat for a partial-file checksum over the first half of the file.
    fileChecksumViaViewFs = viewFs.getFileChecksum(mountDataFilePath, fileLength / 2);
    fileChecksumViaTargetFs = fsTarget.getFileChecksum(fsTargetFilePath, fileLength / 2);
    Assert.assertEquals("File checksum not matching!", fileChecksumViaTargetFs, fileChecksumViaViewFs);
}
Also used : Path(org.apache.hadoop.fs.Path) FileStatus(org.apache.hadoop.fs.FileStatus) FileChecksum(org.apache.hadoop.fs.FileChecksum) Test(org.junit.Test)

Example 19 with FileChecksum

Use of org.apache.hadoop.fs.FileChecksum in the project hadoop by apache.

From the class TestDFSClientRetries, method testGetFileChecksum.

/**
 * Verifies that a file's checksum is stable across datanode failure: with
 * replication 3, stopping the first datanode that holds the first block must
 * not change the checksum returned for the file.
 *
 * @throws Exception on any cluster or filesystem failure
 */
@Test
public void testGetFileChecksum() throws Exception {
    final String f = "/testGetFileChecksum";
    final Path p = new Path(f);
    final MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).build();
    try {
        cluster.waitActive();
        // Create a 1 MiB file with replication factor 3 and a fixed seed.
        final FileSystem fs = cluster.getFileSystem();
        DFSTestUtil.createFile(fs, p, 1L << 20, (short) 3, 20100402L);
        // Get the baseline checksum. assertNotNull gives a clearer failure
        // than the original assertTrue(cs1 != null).
        final FileChecksum cs1 = fs.getFileChecksum(p);
        assertNotNull("Expected a checksum for " + f, cs1);
        // Stop the first datanode hosting the file's first block.
        final List<LocatedBlock> locatedblocks = DFSClient.callGetBlockLocations(cluster.getNameNodeRpc(), f, 0, Long.MAX_VALUE).getLocatedBlocks();
        final DatanodeInfo first = locatedblocks.get(0).getLocations()[0];
        cluster.stopDataNode(first.getXferAddr());
        // The checksum computed from the surviving replicas must match.
        final FileChecksum cs2 = fs.getFileChecksum(p);
        assertEquals(cs1, cs2);
    } finally {
        // Always tear down the mini-cluster, even if an assertion fails.
        cluster.shutdown();
    }
}
Also used : Path(org.apache.hadoop.fs.Path) DatanodeInfo(org.apache.hadoop.hdfs.protocol.DatanodeInfo) FileSystem(org.apache.hadoop.fs.FileSystem) LocatedBlock(org.apache.hadoop.hdfs.protocol.LocatedBlock) Matchers.anyString(org.mockito.Matchers.anyString) FileChecksum(org.apache.hadoop.fs.FileChecksum) Test(org.junit.Test)

Example 20 with FileChecksum

Use of org.apache.hadoop.fs.FileChecksum in the project hadoop by apache.

From the class TestEncryptedTransfer, method testLongLivedReadClientAfterRestart.

/**
 * Verifies that a long-lived client can keep reading (and gets the same
 * checksum) after the cluster is restarted with transfer encryption enabled,
 * and again after a NameNode/DataNode restart invalidates its encryption key.
 *
 * @throws IOException on any filesystem failure
 */
@Test
public void testLongLivedReadClientAfterRestart() throws IOException {
    // Write while unencrypted, then bring the cluster back up encrypted.
    final FileChecksum expectedChecksum = writeUnencryptedAndThenRestartEncryptedCluster();

    // The existing client must still read the plaintext and see the same checksum.
    assertEquals(PLAIN_TEXT, DFSTestUtil.readFile(fs, TEST_PATH));
    assertEquals(expectedChecksum, fs.getFileChecksum(TEST_PATH));

    // Restart the NN and DN, after which the client's encryption key will no
    // longer be valid.
    cluster.restartNameNode();
    assertTrue(cluster.restartDataNode(0));

    // Read and checksum must still succeed with the stale client.
    assertEquals(PLAIN_TEXT, DFSTestUtil.readFile(fs, TEST_PATH));
    assertEquals(expectedChecksum, fs.getFileChecksum(TEST_PATH));
}
Also used : FileChecksum(org.apache.hadoop.fs.FileChecksum) Test(org.junit.Test)

Aggregations

FileChecksum (org.apache.hadoop.fs.FileChecksum)28 Path (org.apache.hadoop.fs.Path)13 Test (org.junit.Test)11 FileSystem (org.apache.hadoop.fs.FileSystem)8 IOException (java.io.IOException)6 ArrayList (java.util.ArrayList)2 Configuration (org.apache.hadoop.conf.Configuration)2 FSDataOutputStream (org.apache.hadoop.fs.FSDataOutputStream)2 FileStatus (org.apache.hadoop.fs.FileStatus)2 DatanodeInfo (org.apache.hadoop.hdfs.protocol.DatanodeInfo)2 LocatedBlock (org.apache.hadoop.hdfs.protocol.LocatedBlock)2 DataInput (java.io.DataInput)1 DataOutput (java.io.DataOutput)1 FileNotFoundException (java.io.FileNotFoundException)1 FileOutputStream (java.io.FileOutputStream)1 OutputStream (java.io.OutputStream)1 HttpURLConnection (java.net.HttpURLConnection)1 SocketTimeoutException (java.net.SocketTimeoutException)1 HashMap (java.util.HashMap)1 TreeMap (java.util.TreeMap)1