Example 6 with InterruptedIOException

Use of java.io.InterruptedIOException in project hadoop by apache.

The class DFSInputStream, method readBlockLength.

/** Read the block length from one of the datanodes. */
private long readBlockLength(LocatedBlock locatedblock) throws IOException {
    assert locatedblock != null : "LocatedBlock cannot be null";
    int replicaNotFoundCount = locatedblock.getLocations().length;
    final DfsClientConf conf = dfsClient.getConf();
    final int timeout = conf.getSocketTimeout();
    LinkedList<DatanodeInfo> nodeList = new LinkedList<DatanodeInfo>(Arrays.asList(locatedblock.getLocations()));
    LinkedList<DatanodeInfo> retryList = new LinkedList<DatanodeInfo>();
    boolean isRetry = false;
    StopWatch sw = new StopWatch();
    while (nodeList.size() > 0) {
        DatanodeInfo datanode = nodeList.pop();
        ClientDatanodeProtocol cdp = null;
        try {
            cdp = DFSUtilClient.createClientDatanodeProtocolProxy(datanode, dfsClient.getConfiguration(), timeout, conf.isConnectToDnViaHostname(), locatedblock);
            final long n = cdp.getReplicaVisibleLength(locatedblock.getBlock());
            if (n >= 0) {
                return n;
            }
        } catch (IOException ioe) {
            checkInterrupted(ioe);
            if (ioe instanceof RemoteException) {
                if (((RemoteException) ioe).unwrapRemoteException() instanceof ReplicaNotFoundException) {
                    // replica is not on the DN. We will treat it as 0 length
                    // if no one actually has a replica.
                    replicaNotFoundCount--;
                } else if (((RemoteException) ioe).unwrapRemoteException() instanceof RetriableException) {
                    // add to the list to be retried if necessary.
                    retryList.add(datanode);
                }
            }
            DFSClient.LOG.debug("Failed to getReplicaVisibleLength from datanode {} for block {}", datanode, locatedblock.getBlock(), ioe);
        } finally {
            if (cdp != null) {
                RPC.stopProxy(cdp);
            }
        }
        // Ran out of nodes, but there are retriable nodes.
        if (nodeList.size() == 0 && retryList.size() > 0) {
            nodeList.addAll(retryList);
            retryList.clear();
            isRetry = true;
        }
        if (isRetry) {
            // start the stop watch if not already running.
            if (!sw.isRunning()) {
                sw.start();
            }
            try {
                // delay between retries.
                Thread.sleep(500);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                throw new InterruptedIOException("Interrupted while getting the length.");
            }
        }
        // see if we ran out of retry time
        if (sw.isRunning() && sw.now(TimeUnit.MILLISECONDS) > timeout) {
            break;
        }
    }
    // Every datanode we asked reported ReplicaNotFoundException, meaning no
    // one has the replica, so treat the block as zero-length. We require all
    // of them to agree, because any other exception could have happened
    // on a DN that has it, and we want to report that error instead.
    if (replicaNotFoundCount == 0) {
        return 0;
    }
    throw new IOException("Cannot obtain block length for " + locatedblock);
}
Also used : InterruptedIOException(java.io.InterruptedIOException) DatanodeInfo(org.apache.hadoop.hdfs.protocol.DatanodeInfo) ReplicaNotFoundException(org.apache.hadoop.hdfs.server.datanode.ReplicaNotFoundException) IOException(java.io.IOException) ClientDatanodeProtocol(org.apache.hadoop.hdfs.protocol.ClientDatanodeProtocol) LinkedList(java.util.LinkedList) StopWatch(org.apache.hadoop.util.StopWatch) DfsClientConf(org.apache.hadoop.hdfs.client.impl.DfsClientConf) RemoteException(org.apache.hadoop.ipc.RemoteException) RetriableException(org.apache.hadoop.ipc.RetriableException)
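
The structure above, drain a candidate list, park retriable failures on a side list, swap the lists once the pass is exhausted, and bound the retry phase with a stopwatch, generalizes beyond HDFS. A condensed, self-contained sketch of the same shape; the Probe interface and every name below are illustrative, not Hadoop API:

import java.io.IOException;
import java.io.InterruptedIOException;
import java.util.ArrayDeque;
import java.util.Collection;
import java.util.Deque;

final class FirstSuccess {
    /** Illustrative stand-in for the per-datanode RPC. */
    interface Probe<T> {
        long tryOnce(T target) throws IOException;
    }

    static <T> long query(Collection<T> targets, Probe<T> probe, long timeoutMs)
            throws IOException {
        Deque<T> pending = new ArrayDeque<>(targets);
        Deque<T> retriable = new ArrayDeque<>();
        long retryStart = -1; // stopwatch, started lazily on the first retry pass
        while (!pending.isEmpty()) {
            T target = pending.pop();
            try {
                long n = probe.tryOnce(target);
                if (n >= 0) {
                    return n;
                }
            } catch (IOException ioe) {
                // the real code only re-queues failures that unwrap to
                // RetriableException; this sketch re-queues everything
                retriable.add(target);
            }
            // ran out of nodes in this pass, but some are retriable: swap lists
            if (pending.isEmpty() && !retriable.isEmpty()) {
                pending.addAll(retriable);
                retriable.clear();
                if (retryStart < 0) {
                    retryStart = System.nanoTime();
                }
                try {
                    Thread.sleep(500); // delay between retry passes
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                    throw new InterruptedIOException("Interrupted between retries");
                }
            }
            // give up once the retry phase has exceeded the timeout
            if (retryStart >= 0
                    && (System.nanoTime() - retryStart) / 1_000_000L > timeoutMs) {
                break;
            }
        }
        throw new IOException("no target returned a result");
    }
}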

Example 7 with InterruptedIOException

Use of java.io.InterruptedIOException in project hadoop by apache.

The class DataStreamer, method waitForAckedSeqno.

/**
   * Wait until the packet with the given sequence number has been acked.
   *
   * @param seqno the sequence number to wait for
   * @throws IOException if the stream was closed or the wait was interrupted
   */
void waitForAckedSeqno(long seqno) throws IOException {
    try (TraceScope ignored = dfsClient.getTracer().newScope("waitForAckedSeqno")) {
        LOG.debug("{} waiting for ack for: {}", this, seqno);
        long begin = Time.monotonicNow();
        try {
            synchronized (dataQueue) {
                while (!streamerClosed) {
                    checkClosed();
                    if (lastAckedSeqno >= seqno) {
                        break;
                    }
                    try {
                        // when we receive an ack, we notify on dataQueue
                        dataQueue.wait(1000);
                    } catch (InterruptedException ie) {
                        throw new InterruptedIOException("Interrupted while waiting for data to be acknowledged by pipeline");
                    }
                }
            }
            checkClosed();
        } catch (ClosedChannelException cce) {
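            // stream was closed while waiting; swallow the exception and
            // fall through to the slow-wait logging below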
        }
        long duration = Time.monotonicNow() - begin;
        if (duration > dfsclientSlowLogThresholdMs) {
            LOG.warn("Slow waitForAckedSeqno took {}ms (threshold={}ms). File being" + " written: {}, block: {}, Write pipeline datanodes: {}.", duration, dfsclientSlowLogThresholdMs, src, block, nodes);
        }
    }
}
Also used : InterruptedIOException(java.io.InterruptedIOException) ClosedChannelException(java.nio.channels.ClosedChannelException) TraceScope(org.apache.htrace.core.TraceScope)
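
The guarded wait above, re-check the condition in a loop, use a bounded wait so a missed notification cannot hang forever, and translate interruption into InterruptedIOException, is the standard monitor pattern. A minimal self-contained sketch, with a hypothetical lastAcked counter standing in for DataStreamer's ack bookkeeping; unlike the Hadoop snippet, the sketch also restores the thread's interrupt flag before throwing:

import java.io.InterruptedIOException;

final class AckWaiter {
    private final Object lock = new Object();
    private long lastAcked = -1;

    /** Called by the ack-processing thread after each acknowledgment. */
    void acked(long seqno) {
        synchronized (lock) {
            lastAcked = Math.max(lastAcked, seqno);
            lock.notifyAll(); // wake every thread blocked in awaitAcked()
        }
    }

    /** Blocks until the given sequence number has been acknowledged. */
    void awaitAcked(long seqno) throws InterruptedIOException {
        synchronized (lock) {
            while (lastAcked < seqno) { // re-check on every wake-up
                try {
                    lock.wait(1000); // bounded, in case a notification is missed
                } catch (InterruptedException ie) {
                    Thread.currentThread().interrupt();
                    throw new InterruptedIOException(
                        "Interrupted while waiting for ack " + seqno);
                }
            }
        }
    }
}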

Example 8 with InterruptedIOException

Use of java.io.InterruptedIOException in project hadoop by apache.

The class DFSOutputStream, method createPacket.

/** Use {@link ByteArrayManager} to create buffer for non-heartbeat packets.*/
protected DFSPacket createPacket(int packetSize, int chunksPerPkt, long offsetInBlock, long seqno, boolean lastPacketInBlock) throws InterruptedIOException {
    final byte[] buf;
    final int bufferSize = PacketHeader.PKT_MAX_HEADER_LEN + packetSize;
    try {
        buf = byteArrayManager.newByteArray(bufferSize);
    } catch (InterruptedException ie) {
        final InterruptedIOException iioe = new InterruptedIOException("seqno=" + seqno);
        iioe.initCause(ie);
        throw iioe;
    }
    return new DFSPacket(buf, chunksPerPkt, offsetInBlock, seqno, getChecksumSize(), lastPacketInBlock);
}
Also used : InterruptedIOException(java.io.InterruptedIOException)
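
Note that InterruptedIOException has no (message, cause) constructor, which is why createPacket() attaches the original InterruptedException via initCause(). A minimal sketch of the same idiom, with a BlockingQueue as an illustrative stand-in for ByteArrayManager; unlike the snippet above, the sketch also restores the thread's interrupt flag before throwing:

import java.io.InterruptedIOException;
import java.util.concurrent.BlockingQueue;

final class BufferPool {
    private final BlockingQueue<byte[]> free;

    BufferPool(BlockingQueue<byte[]> free) {
        this.free = free;
    }

    byte[] take(long seqno) throws InterruptedIOException {
        try {
            return free.take(); // blocks until a buffer is available
        } catch (InterruptedException ie) {
            Thread.currentThread().interrupt();
            InterruptedIOException iioe =
                new InterruptedIOException("seqno=" + seqno);
            iioe.initCause(ie); // preserve the root cause for callers
            throw iioe;
        }
    }
}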

Example 9 with InterruptedIOException

Use of java.io.InterruptedIOException in project hadoop by apache.

The class RecoveredContainerLaunch, method call.

/**
   * Wait on the process specified in the pid file and return its exit code.
   */
@SuppressWarnings("unchecked")
@Override
public Integer call() {
    int retCode = ExitCode.LOST.getExitCode();
    ContainerId containerId = container.getContainerId();
    String appIdStr = containerId.getApplicationAttemptId().getApplicationId().toString();
    String containerIdStr = containerId.toString();
    dispatcher.getEventHandler().handle(new ContainerEvent(containerId, ContainerEventType.CONTAINER_LAUNCHED));
    boolean notInterrupted = true;
    try {
        File pidFile = locatePidFile(appIdStr, containerIdStr);
        if (pidFile != null) {
            String pidPathStr = pidFile.getPath();
            pidFilePath = new Path(pidPathStr);
            exec.activateContainer(containerId, pidFilePath);
            retCode = exec.reacquireContainer(new ContainerReacquisitionContext.Builder().setContainer(container).setUser(container.getUser()).setContainerId(containerId).build());
        } else {
            LOG.warn("Unable to locate pid file for container " + containerIdStr);
        }
    } catch (InterruptedException | InterruptedIOException e) {
        LOG.warn("Interrupted while waiting for exit code from " + containerId);
        notInterrupted = false;
    } catch (IOException e) {
        LOG.error("Unable to recover container " + containerIdStr, e);
    } finally {
        if (notInterrupted) {
            this.completed.set(true);
            exec.deactivateContainer(containerId);
            try {
                getContext().getNMStateStore().storeContainerCompleted(containerId, retCode);
            } catch (IOException e) {
                LOG.error("Unable to set exit code for container " + containerId);
            }
        }
    }
    if (retCode != 0) {
        LOG.warn("Recovered container exited with a non-zero exit code " + retCode);
        this.dispatcher.getEventHandler().handle(new ContainerExitEvent(containerId, ContainerEventType.CONTAINER_EXITED_WITH_FAILURE, retCode, "Container exited with a non-zero exit code " + retCode));
        return retCode;
    }
    LOG.info("Recovered container " + containerId + " succeeded");
    dispatcher.getEventHandler().handle(new ContainerEvent(containerId, ContainerEventType.CONTAINER_EXITED_WITH_SUCCESS));
    return 0;
}
Also used : ContainerEvent(org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerEvent) Path(org.apache.hadoop.fs.Path) InterruptedIOException(java.io.InterruptedIOException) ContainerExitEvent(org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerExitEvent) IOException(java.io.IOException) ContainerId(org.apache.hadoop.yarn.api.records.ContainerId) File(java.io.File)
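
Since InterruptedIOException extends IOException rather than InterruptedException, code that must treat both interruption flavors alike catches them in a single multi-catch branch, as call() does above, and uses a flag to skip the completion bookkeeping in finally. A condensed, self-contained sketch of that shape; the Callable stand-in and all names are illustrative:

import java.io.InterruptedIOException;
import java.util.concurrent.Callable;

final class ReacquireSketch {
    static int run(Callable<Integer> waitForExitCode) {
        int retCode = -1; // "lost" sentinel
        boolean notInterrupted = true;
        try {
            retCode = waitForExitCode.call();
        } catch (InterruptedException | InterruptedIOException e) {
            notInterrupted = false; // interrupted: do not record a final state
        } catch (Exception e) {
            // any other failure: keep the sentinel and still record completion
        } finally {
            if (notInterrupted) {
                // persist the exit code, release container resources, etc.
            }
        }
        return retCode;
    }
}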

Example 10 with InterruptedIOException

Use of java.io.InterruptedIOException in project hadoop by apache.

The class S3AFileSystem, method copyFile.

/**
   * Copy a single object in the bucket via a COPY operation.
   * @param srcKey source object path
   * @param dstKey destination object path
   * @param size object size
   * @throws AmazonClientException on failures inside the AWS SDK
   * @throws InterruptedIOException the operation was interrupted
   * @throws IOException Other IO problems
   */
private void copyFile(String srcKey, String dstKey, long size) throws IOException, InterruptedIOException, AmazonClientException {
    LOG.debug("copyFile {} -> {} ", srcKey, dstKey);
    try {
        ObjectMetadata srcom = getObjectMetadata(srcKey);
        ObjectMetadata dstom = cloneObjectMetadata(srcom);
        setOptionalObjectMetadata(dstom);
        CopyObjectRequest copyObjectRequest = new CopyObjectRequest(bucket, srcKey, bucket, dstKey);
        setOptionalCopyObjectRequestParameters(copyObjectRequest);
        copyObjectRequest.setCannedAccessControlList(cannedACL);
        copyObjectRequest.setNewObjectMetadata(dstom);
        ProgressListener progressListener = new ProgressListener() {

            public void progressChanged(ProgressEvent progressEvent) {
                switch(progressEvent.getEventType()) {
                    case TRANSFER_PART_COMPLETED_EVENT:
                        incrementWriteOperations();
                        break;
                    default:
                        break;
                }
            }
        };
        Copy copy = transfers.copy(copyObjectRequest);
        copy.addProgressListener(progressListener);
        try {
            copy.waitForCopyResult();
            incrementWriteOperations();
            instrumentation.filesCopied(1, size);
        } catch (InterruptedException e) {
            throw new InterruptedIOException("Interrupted copying " + srcKey + " to " + dstKey + ", cancelling");
        }
    } catch (AmazonClientException e) {
        throw translateException("copyFile(" + srcKey + ", " + dstKey + ")", srcKey, e);
    }
}
Also used : InterruptedIOException(java.io.InterruptedIOException) CopyObjectRequest(com.amazonaws.services.s3.model.CopyObjectRequest) ProgressListener(com.amazonaws.event.ProgressListener) Copy(com.amazonaws.services.s3.transfer.Copy) AmazonClientException(com.amazonaws.AmazonClientException) ProgressEvent(com.amazonaws.event.ProgressEvent) ObjectMetadata(com.amazonaws.services.s3.model.ObjectMetadata)
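
The same conversion applies to any long-running transfer a thread blocks on. A generic sketch, using a java.util.concurrent.Future as an illustrative stand-in for the AWS TransferManager's Copy handle; cancelling the in-flight work on interrupt is an assumption of this sketch, not something the snippet above does:

import java.io.IOException;
import java.io.InterruptedIOException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;

final class TransferWait {
    static <T> T await(Future<T> transfer, String what) throws IOException {
        try {
            return transfer.get(); // blocks until the transfer completes
        } catch (InterruptedException e) {
            transfer.cancel(true); // stop the in-flight transfer (sketch only)
            Thread.currentThread().interrupt();
            throw new InterruptedIOException("Interrupted " + what + ", cancelling");
        } catch (ExecutionException e) {
            throw new IOException(what + " failed", e.getCause());
        }
    }
}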

Aggregations

InterruptedIOException (java.io.InterruptedIOException): 274
IOException (java.io.IOException): 186
Test (org.junit.Test): 39
ArrayList (java.util.ArrayList): 27
Socket (java.net.Socket): 26
ConnectException (java.net.ConnectException): 22
ExecutionException (java.util.concurrent.ExecutionException): 22
InputStream (java.io.InputStream): 21
InetSocketAddress (java.net.InetSocketAddress): 21
ByteBuffer (java.nio.ByteBuffer): 21
Path (org.apache.hadoop.fs.Path): 20
NoRouteToHostException (java.net.NoRouteToHostException): 19
ServletException (javax.servlet.ServletException): 17
CountDownLatch (java.util.concurrent.CountDownLatch): 16
SocketTimeoutException (java.net.SocketTimeoutException): 15
HttpServletRequest (javax.servlet.http.HttpServletRequest): 15
HttpServletResponse (javax.servlet.http.HttpServletResponse): 15
EOFException (java.io.EOFException): 14
SocketException (java.net.SocketException): 14
OutputStream (java.io.OutputStream): 13