Example 21 with DataChecksum

Use of org.apache.hadoop.util.DataChecksum in project hadoop by Apache.

The class LocalReplicaInPipeline, method createStreams:

// ReplicaInPipeline
@Override
public ReplicaOutputStreams createStreams(boolean isCreate, DataChecksum requestedChecksum) throws IOException {
    final File blockFile = getBlockFile();
    final File metaFile = getMetaFile();
    if (DataNode.LOG.isDebugEnabled()) {
        DataNode.LOG.debug("writeTo blockfile is " + blockFile + " of size " + blockFile.length());
        DataNode.LOG.debug("writeTo metafile is " + metaFile + " of size " + metaFile.length());
    }
    long blockDiskSize = 0L;
    long crcDiskSize = 0L;
    // the checksum that should actually be used -- this
    // may differ from requestedChecksum for appends.
    final DataChecksum checksum;
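    // Open the meta file read-write up front: for append/recovery its header
    // is read below, and its file descriptor is reused later for crcOut.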
    final RandomAccessFile metaRAF = getFileIoProvider().getRandomAccessFile(getVolume(), metaFile, "rw");
    if (!isCreate) {
        // For append or recovery, we must enforce the existing checksum.
        // Also, verify that the file has correct lengths, etc.
        boolean checkedMeta = false;
        try {
            BlockMetadataHeader header = BlockMetadataHeader.readHeader(metaRAF);
            checksum = header.getChecksum();
            if (checksum.getBytesPerChecksum() != requestedChecksum.getBytesPerChecksum()) {
                throw new IOException("Client requested checksum " + requestedChecksum + " when appending to an existing block " + "with different chunk size: " + checksum);
            }
            int bytesPerChunk = checksum.getBytesPerChecksum();
            int checksumSize = checksum.getChecksumSize();
            blockDiskSize = bytesOnDisk;
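            // The meta file holds a fixed header plus one checksum per chunk;
            // the division below rounds the block size up to whole chunks.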
            crcDiskSize = BlockMetadataHeader.getHeaderSize() + (blockDiskSize + bytesPerChunk - 1) / bytesPerChunk * checksumSize;
            if (blockDiskSize > 0 && (blockDiskSize > blockFile.length() || crcDiskSize > metaFile.length())) {
                throw new IOException("Corrupted block: " + this);
            }
            checkedMeta = true;
        } finally {
            if (!checkedMeta) {
                // clean up in case of exceptions.
                IOUtils.closeStream(metaRAF);
            }
        }
    } else {
        // for create, we can use the requested checksum
        checksum = requestedChecksum;
    }
    final FileIoProvider fileIoProvider = getFileIoProvider();
    FileOutputStream blockOut = null;
    FileOutputStream crcOut = null;
    try {
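        // Wrap the raw file descriptors in output streams; crcOut shares
        // metaRAF's descriptor, so closing either invalidates both.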
        blockOut = fileIoProvider.getFileOutputStream(getVolume(), new RandomAccessFile(blockFile, "rw").getFD());
        crcOut = fileIoProvider.getFileOutputStream(getVolume(), metaRAF.getFD());
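        // For append or recovery, seek past the bytes already on disk so new
        // data and checksums are written at the end.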
        if (!isCreate) {
            blockOut.getChannel().position(blockDiskSize);
            crcOut.getChannel().position(crcDiskSize);
        }
        return new ReplicaOutputStreams(blockOut, crcOut, checksum, getVolume(), fileIoProvider);
    } catch (IOException e) {
        IOUtils.closeStream(blockOut);
        IOUtils.closeStream(crcOut);
        IOUtils.closeStream(metaRAF);
        throw e;
    }
}
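
The crcDiskSize computation above is a plain ceiling division: the meta file stores a fixed-size header followed by one checksum per full or partial chunk of block data. A minimal standalone sketch of the same arithmetic follows; the class name MetaFileSizeSketch is hypothetical, and the hard-coded constants (7-byte header, 512-byte chunks, 4-byte CRC32 checksums) are Hadoop's usual values, assumed here for illustration, since the real code takes them from BlockMetadataHeader.getHeaderSize() and the DataChecksum instance.

// Standalone sketch: expected meta file length for a given block size.
public class MetaFileSizeSketch {

    static final int HEADER_SIZE = 7;        // 2-byte version + 5-byte checksum header
    static final int BYTES_PER_CHUNK = 512;  // checksum.getBytesPerChecksum()
    static final int CHECKSUM_SIZE = 4;      // CRC32: checksum.getChecksumSize()

    static long expectedMetaFileSize(long blockDiskSize) {
        // Round the block size up to whole chunks; each chunk gets one checksum.
        long chunks = (blockDiskSize + BYTES_PER_CHUNK - 1) / BYTES_PER_CHUNK;
        return HEADER_SIZE + chunks * CHECKSUM_SIZE;
    }

    public static void main(String[] args) {
        // 1000 bytes of block data span two 512-byte chunks: 7 + 2 * 4 = 15.
        System.out.println(expectedMetaFileSize(1000L));
    }
}

Under these assumptions, the corruption check crcDiskSize > metaFile.length() simply means the meta file on disk is shorter than this expected length.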
Also used: RandomAccessFile (java.io.RandomAccessFile), FileOutputStream (java.io.FileOutputStream), IOException (java.io.IOException), ReplicaOutputStreams (org.apache.hadoop.hdfs.server.datanode.fsdataset.ReplicaOutputStreams), File (java.io.File), DataChecksum (org.apache.hadoop.util.DataChecksum)

Aggregations

DataChecksum (org.apache.hadoop.util.DataChecksum): 21
IOException (java.io.IOException): 13
DataInputStream (java.io.DataInputStream): 6
FileInputStream (java.io.FileInputStream): 6
DataOutputStream (java.io.DataOutputStream): 4
File (java.io.File): 4
InputStream (java.io.InputStream): 4
RandomAccessFile (java.io.RandomAccessFile): 4
ByteBuffer (java.nio.ByteBuffer): 4
BufferedInputStream (java.io.BufferedInputStream): 3
BufferedOutputStream (java.io.BufferedOutputStream): 3
Path (org.apache.hadoop.fs.Path): 3
Test (org.junit.Test): 3
FileOutputStream (java.io.FileOutputStream): 2
InterruptedIOException (java.io.InterruptedIOException): 2
InvocationTargetException (java.lang.reflect.InvocationTargetException): 2
InetSocketAddress (java.net.InetSocketAddress): 2
Socket (java.net.Socket): 2
HadoopIllegalArgumentException (org.apache.hadoop.HadoopIllegalArgumentException): 2
Configuration (org.apache.hadoop.conf.Configuration): 2