Search in sources :

Example 6 with NameNodeMetrics

Use of org.apache.hadoop.hdfs.server.namenode.metrics.NameNodeMetrics in the Apache Hadoop project.

The doGet method of the ImageServlet class.

@Override
public void doGet(final HttpServletRequest request, final HttpServletResponse response) throws ServletException, IOException {
    try {
        final ServletContext context = getServletContext();
        final FSImage nnImage = NameNodeHttpServer.getFsImageFromContext(context);
        final GetImageParams parsedParams = new GetImageParams(request, response);
        final Configuration conf = (Configuration) context.getAttribute(JspHelper.CURRENT_CONF);
        final NameNodeMetrics metrics = NameNode.getNameNodeMetrics();
        validateRequest(context, conf, request, response, nnImage, parsedParams.getStorageInfoString());
        UserGroupInformation.getCurrentUser().doAs(new PrivilegedExceptionAction<Void>() {

            @Override
            public Void run() throws Exception {
                if (parsedParams.isGetImage()) {
                    long txid = parsedParams.getTxId();
                    File imageFile = null;
                    String errorMessage = "Could not find image";
                    if (parsedParams.shouldFetchLatest()) {
                        imageFile = nnImage.getStorage().getHighestFsImageName();
                    } else {
                        errorMessage += " with txid " + txid;
                        imageFile = nnImage.getStorage().getFsImage(txid, EnumSet.of(NameNodeFile.IMAGE, NameNodeFile.IMAGE_ROLLBACK));
                    }
                    if (imageFile == null) {
                        throw new IOException(errorMessage);
                    }
                    CheckpointFaultInjector.getInstance().beforeGetImageSetsHeaders();
                    long start = monotonicNow();
                    serveFile(imageFile);
                    if (metrics != null) {
                        // Metrics non-null only when used inside name node
                        long elapsed = monotonicNow() - start;
                        metrics.addGetImage(elapsed);
                    }
                } else if (parsedParams.isGetEdit()) {
                    long startTxId = parsedParams.getStartTxId();
                    long endTxId = parsedParams.getEndTxId();
                    File editFile = nnImage.getStorage().findFinalizedEditsFile(startTxId, endTxId);
                    long start = monotonicNow();
                    serveFile(editFile);
                    if (metrics != null) {
                        // Metrics non-null only when used inside name node
                        long elapsed = monotonicNow() - start;
                        metrics.addGetEdit(elapsed);
                    }
                }
                return null;
            }

            private void serveFile(File file) throws IOException {
                FileInputStream fis = new FileInputStream(file);
                try {
                    setVerificationHeadersForGet(response, file);
                    setFileNameHeaders(response, file);
                    if (!file.exists()) {
                        // process of setting headers!
                        throw new FileNotFoundException(file.toString());
                    // It's possible the file could be deleted after this point, but
                    // we've already opened the 'fis' stream.
                    // It's also possible length could change, but this would be
                    // detected by the client side as an inaccurate length header.
                    }
                    // send file
                    DataTransferThrottler throttler = parsedParams.isBootstrapStandby ? getThrottlerForBootstrapStandby(conf) : getThrottler(conf);
                    TransferFsImage.copyFileToStream(response.getOutputStream(), file, fis, throttler);
                } finally {
                    IOUtils.closeStream(fis);
                }
            }
        });
    } catch (Throwable t) {
        String errMsg = "GetImage failed. " + StringUtils.stringifyException(t);
        response.sendError(HttpServletResponse.SC_GONE, errMsg);
        throw new IOException(errMsg);
    } finally {
        response.getOutputStream().close();
    }
}
Also used : Configuration(org.apache.hadoop.conf.Configuration) DataTransferThrottler(org.apache.hadoop.hdfs.util.DataTransferThrottler) ServletException(javax.servlet.ServletException) ServletContext(javax.servlet.ServletContext) NameNodeMetrics(org.apache.hadoop.hdfs.server.namenode.metrics.NameNodeMetrics) NameNodeFile(org.apache.hadoop.hdfs.server.namenode.NNStorage.NameNodeFile)

Aggregations

NameNodeMetrics (org.apache.hadoop.hdfs.server.namenode.metrics.NameNodeMetrics)6 IOException (java.io.IOException)3 ServletContext (javax.servlet.ServletContext)2 ServletException (javax.servlet.ServletException)2 Configuration (org.apache.hadoop.conf.Configuration)2 NameNodeFile (org.apache.hadoop.hdfs.server.namenode.NNStorage.NameNodeFile)2 File (java.io.File)1 RandomAccessFile (java.io.RandomAccessFile)1 Block (org.apache.hadoop.hdfs.protocol.Block)1 ExtendedBlock (org.apache.hadoop.hdfs.protocol.ExtendedBlock)1 LocatedBlock (org.apache.hadoop.hdfs.protocol.LocatedBlock)1 LocatedStripedBlock (org.apache.hadoop.hdfs.protocol.LocatedStripedBlock)1 DatanodeDescriptor (org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor)1 RollingUpgradeStartupOption (org.apache.hadoop.hdfs.server.common.HdfsServerConstants.RollingUpgradeStartupOption)1 StartupOption (org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption)1 CachedBlock (org.apache.hadoop.hdfs.server.namenode.CachedBlock)1 DataTransferThrottler (org.apache.hadoop.hdfs.util.DataTransferThrottler)1 MD5Hash (org.apache.hadoop.io.MD5Hash)1 Test (org.junit.Test)1