Search in sources:

Example 21 with EOFException

use of java.io.EOFException in project hadoop by apache.

The class DataNode, method transferBlock.

@VisibleForTesting
void transferBlock(ExtendedBlock block, DatanodeInfo[] xferTargets, StorageType[] xferTargetStorageTypes) throws IOException {
    BPOfferService bpos = getBPOSForBlock(block);
    DatanodeRegistration bpReg = getDNRegistrationForBP(block.getBlockPoolId());
    // Probe the local replica once and map each failure mode onto a flag;
    // the flags drive the error reporting below.
    boolean missingReplica = false;
    boolean notFinalized = false;
    boolean missingBlockFile = false;
    boolean onDiskTooShort = false;
    try {
        data.checkBlock(block, block.getNumBytes(), ReplicaState.FINALIZED);
    } catch (ReplicaNotFoundException e) {
        missingReplica = true;
    } catch (UnexpectedReplicaStateException e) {
        notFinalized = true;
    } catch (FileNotFoundException e) {
        missingBlockFile = true;
    } catch (EOFException e) {
        onDiskTooShort = true;
    } catch (IOException e) {
        // Any other I/O failure means the block file could not be accessed;
        // treat it like a missing file so it is reported as a bad block.
        missingBlockFile = true;
    }
    if (missingReplica || notFinalized) {
        // Nothing usable to send; tell the NameNode the block is invalid.
        String errStr = "Can't send invalid block " + block;
        LOG.info(errStr);
        bpos.trySendErrorReport(DatanodeProtocol.INVALID_BLOCK, errStr);
        return;
    }
    if (missingBlockFile) {
        // Report back to the NameNode a bad block caused by a
        // non-existent (or inaccessible) block file.
        reportBadBlock(bpos, block, "Can't replicate block " + block + " because the block file doesn't exist, or is not accessible");
        return;
    }
    if (onDiskTooShort) {
        // An on-disk length shorter than the NameNode-recorded length
        // indicates corruption, so report the corrupt block.
        reportBadBlock(bpos, block, "Can't replicate block " + block + " because on-disk length " + data.getLength(block) + " is shorter than NameNode recorded length " + block.getNumBytes());
        return;
    }
    if (xferTargets.length > 0) {
        // Build a space-separated list of targets for the log message.
        StringBuilder targetNames = new StringBuilder();
        for (DatanodeInfo target : xferTargets) {
            targetNames.append(target);
            targetNames.append(" ");
        }
        LOG.info(bpReg + " Starting thread to transfer " + block + " to " + targetNames);
        // Hand the actual transfer off to a daemon thread.
        new Daemon(new DataTransfer(xferTargets, xferTargetStorageTypes, block, BlockConstructionStage.PIPELINE_SETUP_CREATE, "")).start();
    }
}
Also used : FileNotFoundException(java.io.FileNotFoundException) IOException(java.io.IOException) DatanodeRegistration(org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration) Daemon(org.apache.hadoop.util.Daemon) EOFException(java.io.EOFException) VisibleForTesting(com.google.common.annotations.VisibleForTesting)

Example 22 with EOFException

use of java.io.EOFException in project hadoop by apache.

The class BPServiceActor, method register.

/**
 * Registers this block pool with its corresponding NameNode.
 * <p>
 * On startup the DataNode must register each block pool with the NameNode
 * in order to (1) report which storage it is serving and (2) obtain the
 * registration ID the NameNode issues to recognize registered DataNodes.
 * Retries with a short sleep until registration succeeds or
 * {@code shouldRun()} becomes false.
 *
 * @param nsInfo current NamespaceInfo
 * @see FSNamesystem#registerDatanode(DatanodeRegistration)
 * @throws IOException if the RPC fails with an error other than the
 *         retried connection problems
 */
void register(NamespaceInfo nsInfo) throws IOException {
    // The handshake() phase already loaded the block pool storage from
    // disk, so derive the registration request from that information.
    DatanodeRegistration reg = bpos.createRegistration();
    LOG.info(this + " beginning handshake with NN");
    while (shouldRun()) {
        try {
            // Adopt the registration returned by the NameNode — it carries
            // fields updated on the server side.
            reg = bpNamenode.registerDatanode(reg);
            reg.setNamespaceInfo(nsInfo);
            bpRegistration = reg;
            break;
        } catch (EOFException eof) {
            // The NameNode might have just restarted; pause and retry.
            LOG.info("Problem connecting to server: " + nnAddr + " :" + eof.getLocalizedMessage());
            sleepAndLogInterrupts(1000, "connecting to server");
        } catch (SocketTimeoutException ste) {
            // The NameNode is busy; pause and retry.
            LOG.info("Problem connecting to server: " + nnAddr);
            sleepAndLogInterrupts(1000, "connecting to server");
        }
    }
    LOG.info("Block pool " + this + " successfully registered with NN");
    bpos.registrationSucceeded(this, bpRegistration);
    // Random short delay — helps scatter the initial block reports from
    // all DataNodes so they do not hit the NameNode simultaneously.
    scheduler.scheduleBlockReport(dnConf.initialBlockReportDelayMs);
}
Also used : DatanodeRegistration(org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration) SocketTimeoutException(java.net.SocketTimeoutException) EOFException(java.io.EOFException)

Example 23 with EOFException

use of java.io.EOFException in project hadoop by apache.

The class OfflineImageViewerPB, method run.

/**
 * Parses the command line and dispatches to the requested fsimage
 * processor.
 *
 * @param args command-line arguments
 * @return 0 on success; -1 on bad options or processing failure
 * @throws Exception if an unexpected error escapes a processor
 */
public static int run(String[] args) throws Exception {
    Options options = buildOptions();
    if (args.length == 0) {
        printUsage();
        return 0;
    }
    // print help and exit with zero exit code
    if (args.length == 1 && isHelpOption(args[0])) {
        printUsage();
        return 0;
    }
    CommandLineParser parser = new PosixParser();
    CommandLine cmd;
    try {
        cmd = parser.parse(options, args);
    } catch (ParseException e) {
        // Include the parser's message so the user can see which option
        // was rejected instead of a bare, trailing colon.
        System.out.println("Error parsing command-line options: " + e.getMessage());
        printUsage();
        return -1;
    }
    if (cmd.hasOption("h")) {
        // print help and exit with non zero exit code since
        // it is not expected to give help and other options together.
        printUsage();
        return -1;
    }
    String inputFile = cmd.getOptionValue("i");
    String processor = cmd.getOptionValue("p", "Web");
    String outputFile = cmd.getOptionValue("o", "-");
    String delimiter = cmd.getOptionValue("delimiter", PBImageDelimitedTextWriter.DEFAULT_DELIMITER);
    String tempPath = cmd.getOptionValue("t", "");
    Configuration conf = new Configuration();
    // "-" means write to stdout; otherwise open the named file as UTF-8.
    try (PrintStream out = outputFile.equals("-") ? System.out : new PrintStream(outputFile, "UTF-8")) {
        switch(processor) {
            case "FileDistribution":
                long maxSize = Long.parseLong(cmd.getOptionValue("maxSize", "0"));
                int step = Integer.parseInt(cmd.getOptionValue("step", "0"));
                boolean formatOutput = cmd.hasOption("format");
                new FileDistributionCalculator(conf, maxSize, step, formatOutput, out).visit(new RandomAccessFile(inputFile, "r"));
                break;
            case "XML":
                new PBImageXmlWriter(conf, out).visit(new RandomAccessFile(inputFile, "r"));
                break;
            case "ReverseXML":
                try {
                    OfflineImageReconstructor.run(inputFile, outputFile);
                } catch (Exception e) {
                    System.err.println("OfflineImageReconstructor failed: " + e.getMessage());
                    e.printStackTrace(System.err);
                    System.exit(1);
                }
                break;
            case "Web":
                String addr = cmd.getOptionValue("addr", "localhost:5978");
                try (WebImageViewer viewer = new WebImageViewer(NetUtils.createSocketAddr(addr))) {
                    viewer.start(inputFile);
                }
                break;
            case "Delimited":
                try (PBImageDelimitedTextWriter writer = new PBImageDelimitedTextWriter(out, delimiter, tempPath)) {
                    writer.visit(new RandomAccessFile(inputFile, "r"));
                }
                break;
            default:
                System.err.println("Invalid processor specified : " + processor);
                printUsage();
                return -1;
        }
        return 0;
    } catch (EOFException e) {
        // Truncated fsimage file.
        System.err.println("Input file ended unexpectedly. Exiting");
    } catch (IOException e) {
        System.err.println("Encountered exception.  Exiting: " + e.getMessage());
        e.printStackTrace(System.err);
    }
    return -1;
}
Also used : Options(org.apache.commons.cli.Options) PrintStream(java.io.PrintStream) Configuration(org.apache.hadoop.conf.Configuration) PosixParser(org.apache.commons.cli.PosixParser) IOException(java.io.IOException) IOException(java.io.IOException) EOFException(java.io.EOFException) ParseException(org.apache.commons.cli.ParseException) CommandLine(org.apache.commons.cli.CommandLine) RandomAccessFile(java.io.RandomAccessFile) EOFException(java.io.EOFException) CommandLineParser(org.apache.commons.cli.CommandLineParser) ParseException(org.apache.commons.cli.ParseException)

Example 24 with EOFException

use of java.io.EOFException in project hadoop by apache.

The class OfflineImageViewer, method main.

/**
 * Entry point to command-line-driven operation.  User may specify
 * options and start fsimage viewer from the command line.  Program
 * will process image file and exit cleanly or, if an error is
 * encountered, inform user and exit.
 *
 * @param args Command line options
 * @throws IOException
 */
public static void main(String[] args) throws IOException {
    Options options = buildOptions();
    if (args.length == 0) {
        printUsage();
        return;
    }
    CommandLineParser parser = new PosixParser();
    CommandLine cmd;
    try {
        cmd = parser.parse(options, args);
    } catch (ParseException e) {
        // Include the parser's message so the user can see which option
        // was rejected instead of a bare, trailing colon.
        System.out.println("Error parsing command-line options: " + e.getMessage());
        printUsage();
        return;
    }
    if (cmd.hasOption("h")) {
        // print help and exit
        printUsage();
        return;
    }
    boolean skipBlocks = cmd.hasOption("skipBlocks");
    boolean printToScreen = cmd.hasOption("printToScreen");
    String inputFile = cmd.getOptionValue("i");
    String processor = cmd.getOptionValue("p", "Ls");
    String outputFile = cmd.getOptionValue("o");
    String delimiter = cmd.getOptionValue("delimiter");
    // -delimiter only makes sense for the Delimited processor.
    if (!(delimiter == null || processor.equals("Delimited"))) {
        System.out.println("Can only specify -delimiter with Delimited processor");
        printUsage();
        return;
    }
    // Select the visitor implementation for the requested processor.
    ImageVisitor v;
    if (processor.equals("Indented")) {
        v = new IndentedImageVisitor(outputFile, printToScreen);
    } else if (processor.equals("XML")) {
        v = new XmlImageVisitor(outputFile, printToScreen);
    } else if (processor.equals("Delimited")) {
        v = delimiter == null ? new DelimitedImageVisitor(outputFile, printToScreen) : new DelimitedImageVisitor(outputFile, printToScreen, delimiter);
        skipBlocks = false;
    } else if (processor.equals("FileDistribution")) {
        long maxSize = Long.parseLong(cmd.getOptionValue("maxSize", "0"));
        int step = Integer.parseInt(cmd.getOptionValue("step", "0"));
        boolean formatOutput = cmd.hasOption("format");
        v = new FileDistributionVisitor(outputFile, maxSize, step, formatOutput);
    } else if (processor.equals("NameDistribution")) {
        v = new NameDistributionVisitor(outputFile, printToScreen);
    } else {
        // Default (and "Ls") processor; needs block data, so don't skip it.
        v = new LsImageVisitor(outputFile, printToScreen);
        skipBlocks = false;
    }
    try {
        OfflineImageViewer d = new OfflineImageViewer(inputFile, v, skipBlocks);
        d.go();
    } catch (EOFException e) {
        // Truncated fsimage file.
        System.err.println("Input file ended unexpectedly.  Exiting");
    } catch (IOException e) {
        System.err.println("Encountered exception.  Exiting: " + e.getMessage());
        // Print the full trace for diagnosability, matching the behavior
        // of the protobuf-based OfflineImageViewerPB.
        e.printStackTrace(System.err);
    }
}
Also used : Options(org.apache.commons.cli.Options) PosixParser(org.apache.commons.cli.PosixParser) IOException(java.io.IOException) CommandLine(org.apache.commons.cli.CommandLine) EOFException(java.io.EOFException) CommandLineParser(org.apache.commons.cli.CommandLineParser) ParseException(org.apache.commons.cli.ParseException)

Example 25 with EOFException

use of java.io.EOFException in project hadoop by apache.

The class DataVerifier, method verifyBytes.

/**
 * Verifies a given number of bytes from a file — fewer bytes may be
 * read if a header cannot be read in due to the byte limit.
 *
 * The stream is consumed as a sequence of [header][data chunk] records;
 * each header carries the hash seed and byte count used to verify the
 * data chunk that follows it.
 *
 * @param byteAm
 *          the byte amount to limit to (should be less than or equal to
 *          file size)
 *
 * @param bytesRead
 *          the running byte-read counter to start from; presumably the
 *          number of bytes already consumed before this call — TODO confirm
 *          against callers
 *
 * @param in
 *          the input stream to read from
 *
 * @return VerifyOutput with data about reads
 *
 * @throws IOException
 *           if a read failure occurs
 *
 * @throws BadFileException
 *           if a header can not be read or end of file is reached
 *           unexpectedly
 */
private VerifyOutput verifyBytes(long byteAm, long bytesRead, DataInputStream in) throws IOException, BadFileException {
    if (byteAm <= 0) {
        // Nothing to verify.
        return new VerifyOutput(0, 0, 0, 0);
    }
    long chunksSame = 0;
    long chunksDifferent = 0;
    long readTime = 0;
    // bytesLeft: total bytes still allowed to be consumed overall.
    long bytesLeft = byteAm;
    // bufLeft: bytes remaining in the current header's data chunk.
    long bufLeft = 0;
    // bufRead: bytes already consumed from the current data chunk
    // (used to compute the expected hash offset).
    long bufRead = 0;
    long seqNum = 0;
    DataHasher hasher = null;
    ByteBuffer readBuf = ByteBuffer.wrap(new byte[bufferSize]);
    while (bytesLeft > 0) {
        if (bufLeft <= 0) {
            if (bytesLeft < DataWriter.getHeaderLength()) {
                // no bytes left to read a header
                break;
            }
            // time to read a new header
            ReadInfo header = null;
            try {
                header = readHeader(in);
            } catch (EOFException e) {
                // EOF at a header boundary is a clean end of input
                // (unlike EOF inside a data chunk below, which is fatal)
                break;
            }
            ++seqNum;
            // Seed the hasher with the value recorded in the header so the
            // expected chunk contents can be regenerated.
            hasher = new DataHasher(header.getHashValue());
            bufLeft = header.getByteAm();
            readTime += header.getTimeTaken();
            bytesRead += header.getBytesRead();
            bytesLeft -= header.getBytesRead();
            bufRead = 0;
            // number of bytes to read greater than how many we want to read
            if (bufLeft > bytesLeft) {
                bufLeft = bytesLeft;
            }
            // does the buffer amount have anything??
            if (bufLeft <= 0) {
                continue;
            }
        }
        // figure out the buffer size to read: capped by the overall limit,
        // the current chunk's remainder, and the fixed buffer capacity
        int bufSize = bufferSize;
        if (bytesLeft < bufSize) {
            bufSize = (int) bytesLeft;
        }
        if (bufLeft < bufSize) {
            bufSize = (int) bufLeft;
        }
        // read it in
        try {
            readBuf.rewind();
            long startTime = Timer.now();
            in.readFully(readBuf.array(), 0, bufSize);
            readTime += Timer.elapsed(startTime);
        } catch (EOFException e) {
            // EOF mid-chunk means the file is truncated/corrupt.
            throw new BadFileException("Could not read the number of expected data bytes " + bufSize + " due to unexpected end of file during sequence " + seqNum, e);
        }
        // update the counters
        bytesRead += bufSize;
        bytesLeft -= bufSize;
        bufLeft -= bufSize;
        // verify what we read
        readBuf.rewind();
        // figure out the expected hash offset start point
        long vOffset = determineOffset(bufRead);
        // now update for new position
        bufRead += bufSize;
        // verify the chunk against the hasher-generated expectation
        VerifyInfo verifyRes = verifyBuffer(readBuf, bufSize, vOffset, hasher);
        // update the verification counters
        chunksSame += verifyRes.getSame();
        chunksDifferent += verifyRes.getDifferent();
    }
    return new VerifyOutput(chunksSame, chunksDifferent, bytesRead, readTime);
}
Also used : EOFException(java.io.EOFException) ByteBuffer(java.nio.ByteBuffer)

Aggregations

EOFException (java.io.EOFException)552 IOException (java.io.IOException)255 FileInputStream (java.io.FileInputStream)78 DataInputStream (java.io.DataInputStream)75 Test (org.junit.Test)47 ByteArrayInputStream (java.io.ByteArrayInputStream)43 RandomAccessFile (java.io.RandomAccessFile)42 InputStream (java.io.InputStream)39 FileNotFoundException (java.io.FileNotFoundException)38 ByteBuffer (java.nio.ByteBuffer)37 File (java.io.File)36 ArrayList (java.util.ArrayList)34 BufferedInputStream (java.io.BufferedInputStream)29 ObjectInputStream (java.io.ObjectInputStream)19 Path (org.apache.hadoop.fs.Path)19 InterruptedIOException (java.io.InterruptedIOException)17 ByteArrayOutputStream (java.io.ByteArrayOutputStream)16 SocketTimeoutException (java.net.SocketTimeoutException)16 FSDataInputStream (org.apache.hadoop.fs.FSDataInputStream)16 SocketException (java.net.SocketException)14