
Example 11 with CrawlerSystemException

Use of org.codelibs.fess.crawler.exception.CrawlerSystemException in project fess-crawler by codelibs.

The class FileSystemClient, method getResponseData:

protected ResponseData getResponseData(final String uri, final boolean includeContent) {
    final ResponseData responseData = new ResponseData();
    try {
        responseData.setMethod(Constants.GET_METHOD);
        final String filePath = preprocessUri(uri);
        responseData.setUrl(filePath);
        File file = null;
        try {
            file = new File(new URI(filePath));
        } catch (final URISyntaxException e) {
            logger.warn("Could not parse url: " + filePath, e);
        }
        if (file == null) {
            responseData.setHttpStatusCode(Constants.NOT_FOUND_STATUS_CODE);
            responseData.setCharSet(charset);
            responseData.setContentLength(0);
        } else if (file.isFile()) {
            // check file size
            responseData.setContentLength(file.length());
            checkMaxContentLength(responseData);
            try {
                final FileOwnerAttributeView ownerAttrView = Files.getFileAttributeView(file.toPath(), FileOwnerAttributeView.class);
                if (ownerAttrView != null) {
                    UserPrincipal owner = ownerAttrView.getOwner();
                    if (owner != null) {
                        responseData.addMetaData(FS_FILE_USER, owner.getName());
                    }
                }
            } catch (Exception e) {
                logger.warn("Failed to parse FileOwnerAttributeView.", e);
            }
            try {
                final AclFileAttributeView aclView = Files.getFileAttributeView(file.toPath(), AclFileAttributeView.class);
                if (aclView != null) {
                    responseData.addMetaData(FILE_ATTRIBUTE_VIEW, aclView);
                    responseData.addMetaData(FS_FILE_GROUPS, aclView.getAcl().stream().map(acl -> acl.principal().getName()).toArray(n -> new String[n]));
                }
            } catch (Exception e) {
                logger.warn("Failed to parse AclFileAttributeView.", e);
            }
            try {
                final PosixFileAttributeView posixView = Files.getFileAttributeView(file.toPath(), PosixFileAttributeView.class);
                if (posixView != null) {
                    responseData.addMetaData(FILE_ATTRIBUTE_VIEW, posixView);
                    responseData.addMetaData(FS_FILE_GROUPS, new String[] { posixView.readAttributes().group().getName() });
                }
            } catch (Exception e) {
                logger.warn("Failed to parse PosixFileAttributeView.", e);
            }
            responseData.setHttpStatusCode(Constants.OK_STATUS_CODE);
            responseData.setCharSet(geCharSet(file));
            responseData.setLastModified(new Date(file.lastModified()));
            if (file.canRead()) {
                final MimeTypeHelper mimeTypeHelper = crawlerContainer.getComponent("mimeTypeHelper");
                try (final InputStream is = new BufferedInputStream(new FileInputStream(file))) {
                    responseData.setMimeType(mimeTypeHelper.getContentType(is, file.getName()));
                } catch (final Exception e) {
                    responseData.setMimeType(mimeTypeHelper.getContentType(null, file.getName()));
                }
                if (contentLengthHelper != null) {
                    final long maxLength = contentLengthHelper.getMaxLength(responseData.getMimeType());
                    if (responseData.getContentLength() > maxLength) {
                        throw new MaxLengthExceededException("The content length (" + responseData.getContentLength() + " byte) is over " + maxLength + " byte. The url is " + filePath);
                    }
                }
                if (includeContent) {
                    if (file.length() < maxCachedContentSize) {
                        try (InputStream contentStream = new BufferedInputStream(new FileInputStream(file))) {
                            responseData.setResponseBody(InputStreamUtil.getBytes(contentStream));
                        } catch (final Exception e) {
                            logger.warn("I/O Exception.", e);
                            responseData.setHttpStatusCode(Constants.SERVER_ERROR_STATUS_CODE);
                        }
                    } else {
                        responseData.setResponseBody(file, false);
                    }
                }
            } else {
                // Forbidden
                responseData.setHttpStatusCode(Constants.FORBIDDEN_STATUS_CODE);
                responseData.setMimeType(APPLICATION_OCTET_STREAM);
            }
        } else if (file.isDirectory()) {
            final Set<RequestData> requestDataSet = new HashSet<>();
            if (includeContent) {
                final File[] files = file.listFiles();
                if (files != null) {
                    for (final File f : files) {
                        final String childUri = f.toURI().toASCIIString();
                        requestDataSet.add(RequestDataBuilder.newRequestData().get().url(childUri).build());
                    }
                }
            }
            throw new ChildUrlsException(requestDataSet, this.getClass().getName() + "#getResponseData");
        } else {
            responseData.setHttpStatusCode(Constants.NOT_FOUND_STATUS_CODE);
            responseData.setCharSet(charset);
            responseData.setContentLength(0);
        }
    } catch (final CrawlerSystemException e) {
        CloseableUtil.closeQuietly(responseData);
        throw e;
    } catch (final Exception e) {
        CloseableUtil.closeQuietly(responseData);
        throw new CrawlingAccessException("Could not access " + uri, e);
    }
    return responseData;
}
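The owner and group lookups above rely only on standard java.nio.file attribute views, so they can be exercised in isolation. A minimal, self-contained sketch of that technique (the path below is a hypothetical example, not part of the crawler):

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.FileOwnerAttributeView;
import java.nio.file.attribute.PosixFileAttributeView;

public class FileAttributeDemo {
    public static void main(final String[] args) throws IOException {
        final Path path = Paths.get("/tmp/sample.txt"); // hypothetical file
        // Owner lookup, as in getResponseData above; the view may be null
        // if the underlying filesystem does not support it.
        final FileOwnerAttributeView ownerView = Files.getFileAttributeView(path, FileOwnerAttributeView.class);
        if (ownerView != null) {
            System.out.println("owner: " + ownerView.getOwner().getName());
        }
        // Group lookup via POSIX attributes; this view is null on non-POSIX
        // filesystems (e.g. Windows), which is why getResponseData also
        // consults the ACL view as a fallback.
        final PosixFileAttributeView posixView = Files.getFileAttributeView(path, PosixFileAttributeView.class);
        if (posixView != null) {
            System.out.println("group: " + posixView.readAttributes().group().getName());
        }
    }
}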
Also used : FileOwnerAttributeView(java.nio.file.attribute.FileOwnerAttributeView) CrawlingAccessException(org.codelibs.fess.crawler.exception.CrawlingAccessException) BufferedInputStream(java.io.BufferedInputStream) Date(java.util.Date) URISyntaxException(java.net.URISyntaxException) PosixFileAttributeView(java.nio.file.attribute.PosixFileAttributeView) LoggerFactory(org.slf4j.LoggerFactory) AtomicBoolean(java.util.concurrent.atomic.AtomicBoolean) MaxLengthExceededException(org.codelibs.fess.crawler.exception.MaxLengthExceededException) CrawlerSystemException(org.codelibs.fess.crawler.exception.CrawlerSystemException) AbstractCrawlerClient(org.codelibs.fess.crawler.client.AbstractCrawlerClient) HashSet(java.util.HashSet) UserPrincipal(java.nio.file.attribute.UserPrincipal) URI(java.net.URI) ContentLengthHelper(org.codelibs.fess.crawler.helper.ContentLengthHelper) MimeTypeHelper(org.codelibs.fess.crawler.helper.MimeTypeHelper) InputStreamUtil(org.codelibs.core.io.InputStreamUtil) AclFileAttributeView(java.nio.file.attribute.AclFileAttributeView) Logger(org.slf4j.Logger) Files(java.nio.file.Files) Resource(javax.annotation.Resource) StringUtil(org.codelibs.core.lang.StringUtil) Set(java.util.Set) FileInputStream(java.io.FileInputStream) CrawlerContainer(org.codelibs.fess.crawler.container.CrawlerContainer) File(java.io.File) CloseableUtil(org.codelibs.core.io.CloseableUtil) Constants(org.codelibs.fess.crawler.Constants) URLEncoder(java.net.URLEncoder) RequestData(org.codelibs.fess.crawler.entity.RequestData) AccessTimeoutTarget(org.codelibs.fess.crawler.client.AccessTimeoutTarget) TimeoutManager(org.codelibs.core.timer.TimeoutManager) TimeoutTask(org.codelibs.core.timer.TimeoutTask) ChildUrlsException(org.codelibs.fess.crawler.exception.ChildUrlsException) UnsupportedEncodingException(java.io.UnsupportedEncodingException) RequestDataBuilder(org.codelibs.fess.crawler.builder.RequestDataBuilder) InputStream(java.io.InputStream) ResponseData(org.codelibs.fess.crawler.entity.ResponseData)

Example 12 with CrawlerSystemException

Use of org.codelibs.fess.crawler.exception.CrawlerSystemException in project fess-crawler by codelibs.

The class FtpClient, method getResponseData:

protected ResponseData getResponseData(final String uri, final boolean includeContent) {
    final ResponseData responseData = new ResponseData();
    FTPClient client = null;
    try {
        responseData.setMethod(Constants.GET_METHOD);
        final FtpInfo ftpInfo = new FtpInfo(uri);
        responseData.setUrl(ftpInfo.toUrl());
        client = getClient(ftpInfo);
        FTPFile file = null;
        client.changeWorkingDirectory(ftpInfo.getParent());
        validateRequest(client);
        if (ftpInfo.getName() == null) {
            // root directory
            final Set<RequestData> requestDataSet = new HashSet<>();
            if (includeContent) {
                try {
                    final FTPFile[] files = client.listFiles(ftpInfo.getParent(), FTPFileFilters.NON_NULL);
                    validateRequest(client);
                    for (final FTPFile f : files) {
                        final String childUri = ftpInfo.toChildUrl(f.getName());
                        requestDataSet.add(RequestDataBuilder.newRequestData().get().url(childUri).build());
                    }
                } catch (final IOException e) {
                    disconnectInternalClient(client);
                    throw new CrawlingAccessException("Could not access " + uri, e);
                }
            }
            ftpClientQueue.offer(client);
            throw new ChildUrlsException(requestDataSet, this.getClass().getName() + "#getResponseData");
        }
        final FTPFile[] files = client.listFiles(null, FTPFileFilters.NON_NULL);
        validateRequest(client);
        for (final FTPFile f : files) {
            if (ftpInfo.getName().equals(f.getName())) {
                file = f;
                break;
            }
        }
        updateResponseData(uri, includeContent, responseData, client, ftpInfo, file);
    } catch (final CrawlerSystemException e) {
        CloseableUtil.closeQuietly(responseData);
        throw e;
    } catch (final Exception e) {
        CloseableUtil.closeQuietly(responseData);
        throw new CrawlingAccessException("Could not access " + uri, e);
    }
    return responseData;
}
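The directory walk above is built on Apache Commons Net. A stripped-down sketch of the same listing call outside the crawler (host, credentials, and path are placeholders, not values from the project):

import java.io.IOException;
import org.apache.commons.net.ftp.FTPClient;
import org.apache.commons.net.ftp.FTPFile;

public class FtpListDemo {
    public static void main(final String[] args) throws IOException {
        final FTPClient client = new FTPClient();
        client.connect("ftp.example.com"); // placeholder host
        client.login("anonymous", "anon@example.com"); // placeholder credentials
        // listFiles(pathname) is the same call FtpClient uses to discover child URLs
        for (final FTPFile f : client.listFiles("/pub")) {
            System.out.println((f.isDirectory() ? "dir:  " : "file: ") + f.getName());
        }
        client.logout();
        client.disconnect();
    }
}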
Also used : ChildUrlsException(org.codelibs.fess.crawler.exception.ChildUrlsException) CrawlingAccessException(org.codelibs.fess.crawler.exception.CrawlingAccessException) ResponseData(org.codelibs.fess.crawler.entity.ResponseData) FTPFile(org.apache.commons.net.ftp.FTPFile) IOException(java.io.IOException) FTPClient(org.apache.commons.net.ftp.FTPClient) CrawlerLoginFailureException(org.codelibs.fess.crawler.exception.CrawlerLoginFailureException) MaxLengthExceededException(org.codelibs.fess.crawler.exception.MaxLengthExceededException) CrawlerSystemException(org.codelibs.fess.crawler.exception.CrawlerSystemException) MalformedURLException(java.net.MalformedURLException) RequestData(org.codelibs.fess.crawler.entity.RequestData) HashSet(java.util.HashSet)

Example 13 with CrawlerSystemException

Use of org.codelibs.fess.crawler.exception.CrawlerSystemException in project fess-crawler by codelibs.

The class HcHttpClient, method processRobotsTxt:

protected void processRobotsTxt(final String url) {
    if (StringUtil.isBlank(url)) {
        throw new CrawlerSystemException("url is null or empty.");
    }
    if (robotsTxtHelper == null || !robotsTxtHelper.isEnabled()) {
        // not support robots.txt
        return;
    }
    // crawler context
    final CrawlerContext crawlerContext = CrawlingParameterUtil.getCrawlerContext();
    if (crawlerContext == null) {
        // wrong state
        return;
    }
    final int idx = url.indexOf('/', url.indexOf("://") + 3);
    String hostUrl;
    if (idx >= 0) {
        hostUrl = url.substring(0, idx);
    } else {
        hostUrl = url;
    }
    final String robotTxtUrl = hostUrl + "/robots.txt";
    // check url
    if (crawlerContext.getRobotsTxtUrlSet().contains(robotTxtUrl)) {
        if (logger.isDebugEnabled()) {
            logger.debug(robotTxtUrl + " is already visited.");
        }
        return;
    }
    if (logger.isInfoEnabled()) {
        logger.info("Checking URL: " + robotTxtUrl);
    }
    // add url to a set
    crawlerContext.getRobotsTxtUrlSet().add(robotTxtUrl);
    final HttpGet httpGet = new HttpGet(robotTxtUrl);
    // request header
    for (final Header header : requestHeaderList) {
        httpGet.addHeader(header);
    }
    HttpEntity httpEntity = null;
    try {
        // get a content
        final HttpResponse response = executeHttpClient(httpGet);
        httpEntity = response.getEntity();
        final int httpStatusCode = response.getStatusLine().getStatusCode();
        if (httpStatusCode == 200) {
            // check file size
            final Header contentLengthHeader = response.getFirstHeader("Content-Length");
            if (contentLengthHeader != null) {
                final String value = contentLengthHeader.getValue();
                final long contentLength = Long.parseLong(value);
                if (contentLengthHelper != null) {
                    final long maxLength = contentLengthHelper.getMaxLength("text/plain");
                    if (contentLength > maxLength) {
                        throw new MaxLengthExceededException("The content length (" + contentLength + " byte) is over " + maxLength + " byte. The url is " + robotTxtUrl);
                    }
                }
            }
            if (httpEntity != null) {
                final RobotsTxt robotsTxt = robotsTxtHelper.parse(httpEntity.getContent());
                if (robotsTxt != null) {
                    final String[] sitemaps = robotsTxt.getSitemaps();
                    if (sitemaps.length > 0) {
                        crawlerContext.addSitemaps(sitemaps);
                    }
                    final RobotsTxt.Directive directive = robotsTxt.getMatchedDirective(userAgent);
                    if (directive != null) {
                        if (useRobotsTxtDisallows) {
                            for (String urlPattern : directive.getDisallows()) {
                                if (StringUtil.isNotBlank(urlPattern)) {
                                    urlPattern = convertRobotsTxtPathPattern(urlPattern);
                                    crawlerContext.getUrlFilter().addExclude(hostUrl + urlPattern);
                                }
                            }
                        }
                        if (useRobotsTxtAllows) {
                            for (String urlPattern : directive.getAllows()) {
                                if (StringUtil.isNotBlank(urlPattern)) {
                                    urlPattern = convertRobotsTxtPathPattern(urlPattern);
                                    crawlerContext.getUrlFilter().addInclude(hostUrl + urlPattern);
                                }
                            }
                        }
                    }
                }
            }
        }
    } catch (final CrawlerSystemException e) {
        httpGet.abort();
        throw e;
    } catch (final Exception e) {
        httpGet.abort();
        throw new CrawlingAccessException("Could not process " + robotTxtUrl + ". ", e);
    } finally {
        EntityUtils.consumeQuietly(httpEntity);
    }
}
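The host extraction above is plain string arithmetic and can be tried in isolation. A minimal sketch with an arbitrary example URL:

public class RobotsTxtUrlDemo {
    public static void main(final String[] args) {
        final String url = "https://example.com/docs/page.html"; // arbitrary example
        // Find the first '/' after the "://" scheme separator, as processRobotsTxt does
        final int idx = url.indexOf('/', url.indexOf("://") + 3);
        final String hostUrl = idx >= 0 ? url.substring(0, idx) : url;
        // Prints "https://example.com/robots.txt"
        System.out.println(hostUrl + "/robots.txt");
    }
}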
Also used : HttpEntity(org.apache.http.HttpEntity) MaxLengthExceededException(org.codelibs.fess.crawler.exception.MaxLengthExceededException) CrawlingAccessException(org.codelibs.fess.crawler.exception.CrawlingAccessException) HttpGet(org.apache.http.client.methods.HttpGet) HttpResponse(org.apache.http.HttpResponse) CrawlerSystemException(org.codelibs.fess.crawler.exception.CrawlerSystemException) ParseException(java.text.ParseException) NoRouteToHostException(java.net.NoRouteToHostException) SocketException(java.net.SocketException) ConnectException(java.net.ConnectException) MalformedURLException(java.net.MalformedURLException) IOException(java.io.IOException) UnknownHostException(java.net.UnknownHostException) CrawlerContext(org.codelibs.fess.crawler.CrawlerContext) Header(org.apache.http.Header) BasicHeader(org.apache.http.message.BasicHeader) RobotsTxt(org.codelibs.fess.crawler.entity.RobotsTxt)

Example 14 with CrawlerSystemException

Use of org.codelibs.fess.crawler.exception.CrawlerSystemException in project fess-crawler by codelibs.

The class HcHttpClient, method processHttpMethod:

protected ResponseData processHttpMethod(final String url, final HttpUriRequest httpRequest) {
    try {
        processRobotsTxt(url);
    } catch (final CrawlingAccessException e) {
        if (logger.isInfoEnabled()) {
            final StringBuilder buf = new StringBuilder(100);
            buf.append(e.getMessage());
            if (e.getCause() != null) {
                buf.append(e.getCause().getMessage());
            }
            logger.info(buf.toString());
        } else if (logger.isDebugEnabled()) {
            logger.debug("Crawling Access Exception at " + url, e);
        }
    }
    // request header
    for (final Header header : requestHeaderList) {
        httpRequest.addHeader(header);
    }
    ResponseData responseData = new ResponseData();
    HttpEntity httpEntity = null;
    try {
        // get a content
        final HttpResponse response = executeHttpClient(httpRequest);
        httpEntity = response.getEntity();
        final int httpStatusCode = response.getStatusLine().getStatusCode();
        // redirect
        if (isRedirectHttpStatus(httpStatusCode)) {
            final Header locationHeader = response.getFirstHeader("location");
            if (locationHeader == null) {
                logger.warn("Invalid redirect location at " + url);
            } else {
                final String redirectLocation;
                if (locationHeader.getValue().startsWith("/")) {
                    redirectLocation = buildRedirectLocation(url, locationHeader.getValue());
                } else {
                    redirectLocation = locationHeader.getValue();
                }
                responseData = new ResponseData();
                responseData.setRedirectLocation(redirectLocation);
                return responseData;
            }
        }
        String contentType = null;
        final Header contentTypeHeader = response.getFirstHeader("Content-Type");
        if (contentTypeHeader != null) {
            contentType = contentTypeHeader.getValue();
            final int idx = contentType.indexOf(';');
            if (idx > 0) {
                contentType = contentType.substring(0, idx);
                if (APPLICATION_OCTET_STREAM.equals(contentType)) {
                    contentType = null;
                }
            }
        }
        long contentLength = 0;
        String contentEncoding = Constants.UTF_8;
        if (httpEntity == null) {
            responseData.setResponseBody(new byte[0]);
            if (contentType == null) {
                contentType = defaultMimeType;
            }
        } else {
            final InputStream responseBodyStream = httpEntity.getContent();
            final File outputFile = File.createTempFile("crawler-HcHttpClient-", ".out");
            DeferredFileOutputStream dfos = null;
            try {
                try {
                    dfos = new DeferredFileOutputStream((int) maxCachedContentSize, outputFile);
                    CopyUtil.copy(responseBodyStream, dfos);
                    dfos.flush();
                } finally {
                    CloseableUtil.closeQuietly(dfos);
                }
            } catch (final Exception e) {
                if (!outputFile.delete()) {
                    logger.warn("Could not delete " + outputFile.getAbsolutePath());
                }
                throw e;
            }
            if (dfos.isInMemory()) {
                responseData.setResponseBody(dfos.getData());
                contentLength = dfos.getData().length;
                if (!outputFile.delete()) {
                    logger.warn("Could not delete " + outputFile.getAbsolutePath());
                }
                if (contentType == null) {
                    try (InputStream is = new ByteArrayInputStream(dfos.getData())) {
                        contentType = mimeTypeHelper.getContentType(is, url);
                    } catch (final Exception e) {
                        logger.debug("Failed to detect mime-type.", e);
                        contentType = defaultMimeType;
                    }
                }
            } else {
                responseData.setResponseBody(outputFile, true);
                contentLength = outputFile.length();
                if (contentType == null) {
                    try (InputStream is = new FileInputStream(outputFile)) {
                        contentType = mimeTypeHelper.getContentType(is, url);
                    } catch (final Exception e) {
                        logger.debug("Failed to detect mime-type.", e);
                        contentType = defaultMimeType;
                    }
                }
            }
            final Header contentEncodingHeader = httpEntity.getContentEncoding();
            if (contentEncodingHeader != null) {
                contentEncoding = contentEncodingHeader.getValue();
            }
        }
        // check file size
        if (contentLengthHelper != null) {
            final long maxLength = contentLengthHelper.getMaxLength(contentType);
            if (contentLength > maxLength) {
                throw new MaxLengthExceededException("The content length (" + contentLength + " byte) is over " + maxLength + " byte. The url is " + url);
            }
        }
        responseData.setUrl(url);
        responseData.setCharSet(contentEncoding);
        if (httpRequest instanceof HttpHead) {
            responseData.setMethod(Constants.HEAD_METHOD);
        } else {
            responseData.setMethod(Constants.GET_METHOD);
        }
        responseData.setHttpStatusCode(httpStatusCode);
        for (final Header header : response.getAllHeaders()) {
            responseData.addMetaData(header.getName(), header.getValue());
        }
        responseData.setMimeType(contentType);
        final Header contentLengthHeader = response.getFirstHeader("Content-Length");
        if (contentLengthHeader == null) {
            responseData.setContentLength(contentLength);
        } else {
            final String value = contentLengthHeader.getValue();
            try {
                responseData.setContentLength(Long.parseLong(value));
            } catch (final Exception e) {
                responseData.setContentLength(contentLength);
            }
        }
        checkMaxContentLength(responseData);
        final Header lastModifiedHeader = response.getFirstHeader("Last-Modified");
        if (lastModifiedHeader != null) {
            final String value = lastModifiedHeader.getValue();
            if (StringUtil.isNotBlank(value)) {
                final Date d = parseLastModified(value);
                if (d != null) {
                    responseData.setLastModified(d);
                }
            }
        }
        return responseData;
    } catch (final UnknownHostException e) {
        closeResources(httpRequest, responseData);
        throw new CrawlingAccessException("Unknown host(" + e.getMessage() + "): " + url, e);
    } catch (final NoRouteToHostException e) {
        closeResources(httpRequest, responseData);
        throw new CrawlingAccessException("No route to host(" + e.getMessage() + "): " + url, e);
    } catch (final ConnectException e) {
        closeResources(httpRequest, responseData);
        throw new CrawlingAccessException("Connection time out(" + e.getMessage() + "): " + url, e);
    } catch (final SocketException e) {
        closeResources(httpRequest, responseData);
        throw new CrawlingAccessException("Socket exception(" + e.getMessage() + "): " + url, e);
    } catch (final IOException e) {
        closeResources(httpRequest, responseData);
        throw new CrawlingAccessException("I/O exception(" + e.getMessage() + "): " + url, e);
    } catch (final CrawlerSystemException e) {
        closeResources(httpRequest, responseData);
        throw e;
    } catch (final Exception e) {
        closeResources(httpRequest, responseData);
        throw new CrawlerSystemException("Failed to access " + url, e);
    } finally {
        EntityUtils.consumeQuietly(httpEntity);
    }
}
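The in-memory versus spill-to-disk split above comes from Commons IO's DeferredFileOutputStream: bytes up to the threshold stay in memory, anything beyond is written to the backing file. A minimal sketch of that behavior with an illustrative 1 KiB threshold (not the crawler's maxCachedContentSize):

import java.io.File;
import java.io.IOException;
import org.apache.commons.io.output.DeferredFileOutputStream;

public class DeferredBodyDemo {
    public static void main(final String[] args) throws IOException {
        final File spill = File.createTempFile("demo-", ".out");
        // Same constructor shape as in processHttpMethod above
        final DeferredFileOutputStream dfos = new DeferredFileOutputStream(1024, spill);
        dfos.write(new byte[512]); // below the threshold, so it stays in memory
        dfos.close();
        if (dfos.isInMemory()) {
            System.out.println("in memory: " + dfos.getData().length + " bytes");
        } else {
            System.out.println("spilled to " + spill.getAbsolutePath());
        }
        spill.delete(); // clean up the temp file either way
    }
}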
Also used : SocketException(java.net.SocketException) HttpEntity(org.apache.http.HttpEntity) UnknownHostException(java.net.UnknownHostException) CrawlingAccessException(org.codelibs.fess.crawler.exception.CrawlingAccessException) MaxLengthExceededException(org.codelibs.fess.crawler.exception.MaxLengthExceededException) ByteArrayInputStream(java.io.ByteArrayInputStream) FileInputStream(java.io.FileInputStream) InputStream(java.io.InputStream) ResponseData(org.codelibs.fess.crawler.entity.ResponseData) HttpResponse(org.apache.http.HttpResponse) IOException(java.io.IOException) NoRouteToHostException(java.net.NoRouteToHostException) CrawlerSystemException(org.codelibs.fess.crawler.exception.CrawlerSystemException) ParseException(java.text.ParseException) ConnectException(java.net.ConnectException) MalformedURLException(java.net.MalformedURLException) HttpHead(org.apache.http.client.methods.HttpHead) Date(java.util.Date) Header(org.apache.http.Header) BasicHeader(org.apache.http.message.BasicHeader) DeferredFileOutputStream(org.apache.commons.io.output.DeferredFileOutputStream) File(java.io.File)

Example 15 with CrawlerSystemException

Use of org.codelibs.fess.crawler.exception.CrawlerSystemException in project fess-crawler by codelibs.

The class HtmlXpathExtractor, method getText:

/*
     * (non-Javadoc)
     *
     * @see org.codelibs.fess.crawler.extractor.Extractor#getText(java.io.InputStream,
     * java.util.Map)
     */
@Override
public ExtractData getText(final InputStream in, final Map<String, String> params) {
    if (in == null) {
        throw new CrawlerSystemException("The inputstream is null.");
    }
    try {
        final BufferedInputStream bis = new BufferedInputStream(in);
        final String enc = getEncoding(bis);
        final DOMParser parser = getDomParser();
        final InputSource inputSource = new InputSource(bis);
        inputSource.setEncoding(enc);
        parser.parse(inputSource);
        final Document document = parser.getDocument();
        final StringBuilder buf = new StringBuilder(255);
        final NodeList nodeList = getXPathAPI().selectNodeList(document, targetNodePath);
        for (int i = 0; i < nodeList.getLength(); i++) {
            final Node node = nodeList.item(i);
            buf.append(node.getTextContent()).append(' ');
        }
        return new ExtractData(buf.toString().replaceAll("\\s+", " ").trim());
    } catch (final Exception e) {
        throw new ExtractException(e);
    }
}
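For comparison, the same select-and-concatenate pattern can be reproduced with the JDK's built-in XPath API instead of the NekoHTML DOMParser used above; note that DocumentBuilder requires well-formed markup, unlike the HTML-tolerant parser in the extractor. The expression and markup below are illustrative stand-ins for targetNodePath and the crawled page:

import java.io.StringReader;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathFactory;
import org.w3c.dom.Document;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;

public class XpathTextDemo {
    public static void main(final String[] args) throws Exception {
        final Document doc = DocumentBuilderFactory.newInstance().newDocumentBuilder()
                .parse(new InputSource(new StringReader("<html><body><p>a</p><p> b </p></body></html>")));
        final XPath xpath = XPathFactory.newInstance().newXPath();
        // Select all <p> nodes, standing in for targetNodePath in the extractor
        final NodeList nodes = (NodeList) xpath.evaluate("//p", doc, XPathConstants.NODESET);
        final StringBuilder buf = new StringBuilder();
        for (int i = 0; i < nodes.getLength(); i++) {
            buf.append(nodes.item(i).getTextContent()).append(' ');
        }
        // Collapse runs of whitespace, as getText does; prints "a b"
        System.out.println(buf.toString().replaceAll("\\s+", " ").trim());
    }
}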
Also used : InputSource(org.xml.sax.InputSource) ExtractException(org.codelibs.fess.crawler.exception.ExtractException) ExtractData(org.codelibs.fess.crawler.entity.ExtractData) NodeList(org.w3c.dom.NodeList) Node(org.w3c.dom.Node) Document(org.w3c.dom.Document) CrawlerSystemException(org.codelibs.fess.crawler.exception.CrawlerSystemException) BufferedInputStream(java.io.BufferedInputStream) DOMParser(org.cyberneko.html.parsers.DOMParser)

Aggregations

CrawlerSystemException (org.codelibs.fess.crawler.exception.CrawlerSystemException): 41 usages
IOException (java.io.IOException): 16 usages
CrawlingAccessException (org.codelibs.fess.crawler.exception.CrawlingAccessException): 13 usages
File (java.io.File): 11 usages
InputStream (java.io.InputStream): 11 usages
UnsupportedEncodingException (java.io.UnsupportedEncodingException): 10 usages
BufferedInputStream (java.io.BufferedInputStream): 9 usages
ExtractException (org.codelibs.fess.crawler.exception.ExtractException): 9 usages
ExtractData (org.codelibs.fess.crawler.entity.ExtractData): 8 usages
ResponseData (org.codelibs.fess.crawler.entity.ResponseData): 8 usages
Map (java.util.Map): 7 usages
MaxLengthExceededException (org.codelibs.fess.crawler.exception.MaxLengthExceededException): 7 usages
MalformedURLException (java.net.MalformedURLException): 6 usages
HashMap (java.util.HashMap): 6 usages
AccessResultDataImpl (org.codelibs.fess.crawler.entity.AccessResultDataImpl): 6 usages
RequestData (org.codelibs.fess.crawler.entity.RequestData): 6 usages
ResultData (org.codelibs.fess.crawler.entity.ResultData): 6 usages
ChildUrlsException (org.codelibs.fess.crawler.exception.ChildUrlsException): 6 usages
HashSet (java.util.HashSet): 5 usages
TransformerException (javax.xml.transform.TransformerException): 5 usages