Example 11 with ExtractorFactory

Use of org.codelibs.fess.crawler.extractor.ExtractorFactory in project fess-crawler by codelibs.

The class LhaExtractor, method getText:

@Override
public ExtractData getText(final InputStream in, final Map<String, String> params) {
    if (in == null) {
        throw new CrawlerSystemException("The inputstream is null.");
    }
    final MimeTypeHelper mimeTypeHelper = getMimeTypeHelper();
    final ExtractorFactory extractorFactory = getExtractorFactory();
    final StringBuilder buf = new StringBuilder(1000);
    File tempFile = null;
    LhaFile lhaFile = null;
    try {
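        // LhaFile requires a java.io.File, so spool the input stream to a temporary .lzh file first.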
        tempFile = File.createTempFile("crawler-", ".lzh");
        try (FileOutputStream fos = new FileOutputStream(tempFile)) {
            CopyUtil.copy(in, fos);
        }
        lhaFile = new LhaFile(tempFile);
        @SuppressWarnings("unchecked") final Enumeration<LhaHeader> entries = lhaFile.entries();
        long contentSize = 0;
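        // Walk the archive entries, enforcing the cumulative extracted-size limit.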
        while (entries.hasMoreElements()) {
            final LhaHeader head = entries.nextElement();
            contentSize += head.getOriginalSize();
            if (maxContentSize != -1 && contentSize > maxContentSize) {
                throw new MaxLengthExceededException("Extracted size is " + contentSize + " > " + maxContentSize);
            }
            final String filename = head.getPath();
            final String mimeType = mimeTypeHelper.getContentType(null, filename);
            if (mimeType != null) {
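                // Delegate this entry to the extractor registered for its MIME type.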
                final Extractor extractor = extractorFactory.getExtractor(mimeType);
                if (extractor != null) {
                    InputStream is = null;
                    try {
                        is = lhaFile.getInputStream(head);
                        final Map<String, String> map = new HashMap<>();
                        map.put(TikaMetadataKeys.RESOURCE_NAME_KEY, filename);
                        buf.append(extractor.getText(new IgnoreCloseInputStream(is), map).getContent());
                        buf.append('\n');
                    } catch (final Exception e) {
                        if (logger.isDebugEnabled()) {
                            logger.debug("Exception in an internal extractor.", e);
                        }
                    } finally {
                        CloseableUtil.closeQuietly(is);
                    }
                }
            }
        }
    } catch (final MaxLengthExceededException e) {
        throw e;
    } catch (final Exception e) {
        throw new ExtractException("Could not extract a content.", e);
    } finally {
        if (lhaFile != null) {
            try {
                lhaFile.close();
            } catch (final IOException e) {
                // ignore
            }
        }
        if (tempFile != null && !tempFile.delete()) {
            logger.warn("Failed to delete " + tempFile.getAbsolutePath());
        }
    }
    return new ExtractData(buf.toString().trim());
}
Also used : ExtractException(org.codelibs.fess.crawler.exception.ExtractException) ExtractData(org.codelibs.fess.crawler.entity.ExtractData) MaxLengthExceededException(org.codelibs.fess.crawler.exception.MaxLengthExceededException) HashMap(java.util.HashMap) MimeTypeHelper(org.codelibs.fess.crawler.helper.MimeTypeHelper) ExtractorFactory(org.codelibs.fess.crawler.extractor.ExtractorFactory) IgnoreCloseInputStream(org.codelibs.fess.crawler.util.IgnoreCloseInputStream) InputStream(java.io.InputStream) LhaFile(jp.gr.java_conf.dangan.util.lha.LhaFile) IOException(java.io.IOException) CrawlerSystemException(org.codelibs.fess.crawler.exception.CrawlerSystemException) LhaHeader(jp.gr.java_conf.dangan.util.lha.LhaHeader) FileOutputStream(java.io.FileOutputStream) Extractor(org.codelibs.fess.crawler.extractor.Extractor) File(java.io.File)
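For reference, a minimal sketch of how this extractor could be wired and called, following the container pattern used in the other examples on this page; the component names and the sample file are illustrative assumptions, not taken from the example above.

// Sketch only: wiring assumed from the surrounding examples; the file name is hypothetical.
StandardCrawlerContainer container = new StandardCrawlerContainer()
        .singleton("mimeTypeHelper", MimeTypeHelperImpl.class)
        .singleton("tikaExtractor", TikaExtractor.class)
        .singleton("lhaExtractor", LhaExtractor.class);
container.<ExtractorFactory>singleton("extractorFactory", ExtractorFactory.class, factory -> {
    TikaExtractor tikaExtractor = container.getComponent("tikaExtractor");
    factory.addExtractor("text/plain", tikaExtractor);
});
LhaExtractor lhaExtractor = container.getComponent("lhaExtractor");
// getText spools the stream to a temp file and extracts each archive entry by MIME type.
try (InputStream in = new FileInputStream(new File("sample.lzh"))) {
    ExtractData data = lhaExtractor.getText(in, null);
    System.out.println(data.getContent());
}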

Example 12 with ExtractorFactory

Use of org.codelibs.fess.crawler.extractor.ExtractorFactory in project fess-crawler by codelibs.

The class CrawlerTest, method setUp:

@Override
protected void setUp() throws Exception {
    super.setUp();
    final Map<String, String> featureMap = newHashMap();
    featureMap.put("http://xml.org/sax/features/namespaces", "false");
    final Map<String, String> propertyMap = newHashMap();
    final Map<String, String> childUrlRuleMap = newHashMap();
    childUrlRuleMap.put("//A", "href");
    childUrlRuleMap.put("//AREA", "href");
    childUrlRuleMap.put("//FRAME", "src");
    childUrlRuleMap.put("//IFRAME", "src");
    childUrlRuleMap.put("//IMG", "src");
    childUrlRuleMap.put("//LINK", "href");
    childUrlRuleMap.put("//SCRIPT", "src");
    container = new StandardCrawlerContainer();
    container.<HcHttpClient>prototype("internalHttpClient", HcHttpClient.class, client -> {
        client.setCookieSpec(CookieSpecs.BEST_MATCH);
        client.setClientConnectionManager(container.getComponent("clientConnectionManager"));
    }).prototype("httpClient", FaultTolerantClient.class, client -> {
        client.setCrawlerClient(container.getComponent("internalHttpClient"));
        client.setMaxRetryCount(5);
        client.setRetryInterval(500);
    }).prototype("fsClient", FileSystemClient.class).prototype("ruleManager", RuleManagerImpl.class, manager -> {
        manager.addRule(container.getComponent("sitemapsRule"));
        manager.addRule(container.getComponent("fileRule"));
    }).prototype("accessResult", AccessResultImpl.class).prototype("urlQueue", UrlQueueImpl.class).prototype("crawlerThread", CrawlerThread.class).prototype("crawler", Crawler.class).prototype("urlFilterService", UrlFilterServiceImpl.class).prototype("urlQueueService", UrlQueueServiceImpl.class).prototype("dataService", DataServiceImpl.class).prototype("urlFilter", UrlFilterImpl.class).singleton("urlConvertHelper", UrlConvertHelper.class).singleton("intervalController", DefaultIntervalController.class).singleton("sitemapsHelper", SitemapsHelper.class).singleton("logHelper", LogHelperImpl.class).singleton("encodingHelper", EncodingHelper.class).singleton("contentLengthHelper", ContentLengthHelper.class).singleton("mimeTypeHelper", MimeTypeHelperImpl.class).<FileTransformer>singleton("fileTransformer", FileTransformer.class, transformer -> {
        transformer.setName("fileTransformer");
        transformer.setFeatureMap(featureMap);
        transformer.setPropertyMap(propertyMap);
        transformer.setChildUrlRuleMap(childUrlRuleMap);
    }).singleton("dataHelper", MemoryDataHelper.class).singleton("robotsTxtHelper", RobotsTxtHelper.class).<CrawlerClientFactory>singleton("clientFactory", CrawlerClientFactory.class, factory -> {
        factory.addClient("http:.*", container.getComponent("httpClient"));
        factory.addClient("file:.*", container.getComponent("fsClient"));
    }).singleton("tikaExtractor", TikaExtractor.class).<ExtractorFactory>singleton("extractorFactory", ExtractorFactory.class, factory -> {
        TikaExtractor tikaExtractor = container.getComponent("tikaExtractor");
        factory.addExtractor("text/plain", tikaExtractor);
        factory.addExtractor("text/html", tikaExtractor);
    }).singleton("httpClient", // 
    HcHttpClient.class).singleton("sitemapsResponseProcessor", // 
    SitemapsResponseProcessor.class).<SitemapsRule>singleton("sitemapsRule", SitemapsRule.class, rule -> {
        rule.setResponseProcessor(container.getComponent("sitemapsResponseProcessor"));
        rule.setRuleId("sitemapsRule");
        rule.addRule("url", ".*sitemap.*");
    }).<// 
    DefaultResponseProcessor>singleton("defaultResponseProcessor", DefaultResponseProcessor.class, processor -> {
        processor.setTransformer(container.getComponent("fileTransformer"));
        processor.setSuccessfulHttpCodes(new int[] { 200 });
        processor.setNotModifiedHttpCodes(new int[] { 304 });
    }).<// 
    RegexRule>singleton("fileRule", RegexRule.class, rule -> {
        rule.setRuleId("fileRule");
        rule.setDefaultRule(true);
        rule.setResponseProcessor(container.getComponent("defaultResponseProcessor"));
    }).<// 
    PoolingHttpClientConnectionManager>singleton("clientConnectionManager", new PoolingHttpClientConnectionManager(5, TimeUnit.MINUTES), manager -> {
        manager.setMaxTotal(200);
        manager.setDefaultMaxPerRoute(20);
    });
    crawler = container.getComponent("crawler");
    dataService = container.getComponent("dataService");
    urlQueueService = container.getComponent("urlQueueService");
    fileTransformer = container.getComponent("fileTransformer");
}
Also used : StandardCrawlerContainer(org.codelibs.fess.crawler.container.StandardCrawlerContainer) MimeTypeHelperImpl(org.codelibs.fess.crawler.helper.impl.MimeTypeHelperImpl) HcHttpClient(org.codelibs.fess.crawler.client.http.HcHttpClient) UrlQueueImpl(org.codelibs.fess.crawler.entity.UrlQueueImpl) PlainTestCase(org.dbflute.utflute.core.PlainTestCase) ExtractorFactory(org.codelibs.fess.crawler.extractor.ExtractorFactory) FileSystemClient(org.codelibs.fess.crawler.client.fs.FileSystemClient) DataService(org.codelibs.fess.crawler.service.DataService) CookieSpecs(org.apache.http.client.config.CookieSpecs) SitemapsHelper(org.codelibs.fess.crawler.helper.SitemapsHelper) DefaultResponseProcessor(org.codelibs.fess.crawler.processor.impl.DefaultResponseProcessor) RuleManagerImpl(org.codelibs.fess.crawler.rule.impl.RuleManagerImpl) DataServiceImpl(org.codelibs.fess.crawler.service.impl.DataServiceImpl) RegexRule(org.codelibs.fess.crawler.rule.impl.RegexRule) FaultTolerantClient(org.codelibs.fess.crawler.client.FaultTolerantClient) LogHelperImpl(org.codelibs.fess.crawler.helper.impl.LogHelperImpl) SitemapsRule(org.codelibs.fess.crawler.rule.impl.SitemapsRule) Map(java.util.Map) UrlFilterServiceImpl(org.codelibs.fess.crawler.service.impl.UrlFilterServiceImpl) PoolingHttpClientConnectionManager(org.apache.http.impl.conn.PoolingHttpClientConnectionManager) CrawlerWebServer(org.codelibs.fess.crawler.util.CrawlerWebServer) ContentLengthHelper(org.codelibs.fess.crawler.helper.ContentLengthHelper) EncodingHelper(org.codelibs.fess.crawler.helper.EncodingHelper) RobotsTxtHelper(org.codelibs.fess.crawler.helper.RobotsTxtHelper) AccessResultImpl(org.codelibs.fess.crawler.entity.AccessResultImpl) SitemapsResponseProcessor(org.codelibs.fess.crawler.processor.impl.SitemapsResponseProcessor) UrlFilterImpl(org.codelibs.fess.crawler.filter.impl.UrlFilterImpl) MemoryDataHelper(org.codelibs.fess.crawler.helper.MemoryDataHelper) DefaultIntervalController(org.codelibs.fess.crawler.interval.impl.DefaultIntervalController) ResourceUtil(org.codelibs.core.io.ResourceUtil) TikaExtractor(org.codelibs.fess.crawler.extractor.impl.TikaExtractor) File(java.io.File) FileTransformer(org.codelibs.fess.crawler.transformer.impl.FileTransformer) TimeUnit(java.util.concurrent.TimeUnit) UrlQueueService(org.codelibs.fess.crawler.service.UrlQueueService) UrlConvertHelper(org.codelibs.fess.crawler.helper.UrlConvertHelper) UrlQueueServiceImpl(org.codelibs.fess.crawler.service.impl.UrlQueueServiceImpl) CrawlerClientFactory(org.codelibs.fess.crawler.client.CrawlerClientFactory) UrlQueue(org.codelibs.fess.crawler.entity.UrlQueue)
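For reference, a minimal sketch of how a test might drive this container, assuming fess-crawler's Crawler API (addUrl, getCrawlerContext, execute) and the CrawlerWebServer test utility imported above; the port and limits are illustrative.

// Sketch only: values are illustrative, not taken from the example above.
final CrawlerWebServer server = new CrawlerWebServer(7070); // serves local test content
server.start();
try {
    crawler.addUrl("http://localhost:7070/");
    crawler.getCrawlerContext().setMaxAccessCount(10); // stop after 10 pages
    crawler.getCrawlerContext().setNumOfThread(2);
    final String sessionId = crawler.execute(); // blocks until the crawl finishes
    assertTrue(dataService.getCount(sessionId) > 0); // results stored via dataService
} finally {
    server.stop();
}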

Example 13 with ExtractorFactory

Use of org.codelibs.fess.crawler.extractor.ExtractorFactory in project fess-crawler by codelibs.

The class TextTransformerTest, method setUp:

@Override
protected void setUp() throws Exception {
    super.setUp();
    StandardCrawlerContainer container = new StandardCrawlerContainer()
            .singleton("extractorFactory", ExtractorFactory.class)
            .singleton("textTransformer", TextTransformer.class)
            .singleton("tikaExtractor", TikaExtractor.class);
    textTransformer = container.getComponent("textTransformer");
    textTransformer.setName("textTransformer");
    ExtractorFactory extractorFactory = container.getComponent("extractorFactory");
    TikaExtractor tikaExtractor = container.getComponent("tikaExtractor");
    extractorFactory.addExtractor("text/plain", tikaExtractor);
    extractorFactory.addExtractor("text/html", tikaExtractor);
}
Also used : ExtractorFactory(org.codelibs.fess.crawler.extractor.ExtractorFactory) StandardCrawlerContainer(org.codelibs.fess.crawler.container.StandardCrawlerContainer) TikaExtractor(org.codelibs.fess.crawler.extractor.impl.TikaExtractor)
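For reference, a minimal sketch of exercising the transformer wired above; it assumes ResponseData accepts a byte[] body and that ResultData exposes the transformed bytes and their encoding, inside a test method declared throws Exception.

// Sketch only: setResponseBody/getData/getEncoding are assumed from fess-crawler's API.
final ResponseData responseData = new ResponseData();
responseData.setUrl("http://example.com/test.txt"); // hypothetical URL
responseData.setResponseBody("Hello.".getBytes("UTF-8"));
responseData.setCharSet("UTF-8");
responseData.setMimeType("text/plain"); // routed to tikaExtractor via extractorFactory
final ResultData resultData = textTransformer.transform(responseData);
assertEquals("Hello.", new String(resultData.getData(), resultData.getEncoding()));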

Example 14 with ExtractorFactory

Use of org.codelibs.fess.crawler.extractor.ExtractorFactory in project fess-crawler by codelibs.

The class EmlExtractorTest, method setUp:

@Override
protected void setUp() throws Exception {
    super.setUp();
    StandardCrawlerContainer container = new StandardCrawlerContainer().singleton("emlExtractor", EmlExtractor.class);
    container.singleton("mimeTypeHelper", MimeTypeHelperImpl.class).singleton("tikaExtractor", TikaExtractor.class).singleton("zipExtractor", ZipExtractor.class).<ExtractorFactory>singleton("extractorFactory", ExtractorFactory.class, factory -> {
        TikaExtractor tikaExtractor = container.getComponent("tikaExtractor");
        factory.addExtractor("application/pdf", tikaExtractor);
    });
    emlExtractor = container.getComponent("emlExtractor");
}
Also used : ExtractorFactory(org.codelibs.fess.crawler.extractor.ExtractorFactory) StandardCrawlerContainer(org.codelibs.fess.crawler.container.StandardCrawlerContainer) MimeTypeHelperImpl(org.codelibs.fess.crawler.helper.impl.MimeTypeHelperImpl)
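For reference, a minimal sketch of invoking the extractor built above; the file name is hypothetical, and only attachments whose MIME types are registered with the factory (here application/pdf) are delegated to tikaExtractor.

// Sketch only: the sample file is hypothetical.
try (InputStream in = new FileInputStream(new File("sample.eml"))) {
    final ExtractData data = emlExtractor.getText(in, null);
    System.out.println(data.getContent()); // message body plus extracted attachments
}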

Example 15 with ExtractorFactory

Use of org.codelibs.fess.crawler.extractor.ExtractorFactory in project fess-crawler by codelibs.

The class TikaExtractorTest, method setUp:

@Override
protected void setUp() throws Exception {
    super.setUp();
    StandardCrawlerContainer container = new StandardCrawlerContainer();
    container.singleton("mimeTypeHelper", MimeTypeHelperImpl.class).singleton("tikaExtractor", TikaExtractor.class).<ExtractorFactory>singleton("extractorFactory", ExtractorFactory.class, factory -> {
        TikaExtractor tikaExtractor = container.getComponent("tikaExtractor");
        factory.addExtractor("text/plain", tikaExtractor);
        factory.addExtractor("text/html", tikaExtractor);
    });
    tikaExtractor = container.getComponent("tikaExtractor");
}
Also used : ExtractorFactory(org.codelibs.fess.crawler.extractor.ExtractorFactory) StandardCrawlerContainer(org.codelibs.fess.crawler.container.StandardCrawlerContainer)
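For reference, a minimal sketch of calling the extractor configured above, mirroring the RESOURCE_NAME_KEY hint used in Example 11; the HTML snippet and resource name are illustrative.

// Sketch only: input and resource name are illustrative.
final byte[] html = "<html><body><p>Hello Tika</p></body></html>".getBytes(StandardCharsets.UTF_8);
final Map<String, String> params = new HashMap<>();
params.put(TikaMetadataKeys.RESOURCE_NAME_KEY, "test.html"); // hint for Tika's type detection
final ExtractData data = tikaExtractor.getText(new ByteArrayInputStream(html), params);
assertTrue(data.getContent().contains("Hello Tika"));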

Aggregations

ExtractorFactory (org.codelibs.fess.crawler.extractor.ExtractorFactory) 15
StandardCrawlerContainer (org.codelibs.fess.crawler.container.StandardCrawlerContainer) 7
Extractor (org.codelibs.fess.crawler.extractor.Extractor) 6
MimeTypeHelperImpl (org.codelibs.fess.crawler.helper.impl.MimeTypeHelperImpl) 5
HashMap (java.util.HashMap) 4
CrawlerSystemException (org.codelibs.fess.crawler.exception.CrawlerSystemException) 4
MimeTypeHelper (org.codelibs.fess.crawler.helper.MimeTypeHelper) 4
InputStream (java.io.InputStream) 3
ExtractData (org.codelibs.fess.crawler.entity.ExtractData) 3
ExtractException (org.codelibs.fess.crawler.exception.ExtractException) 3
File (java.io.File) 2
IOException (java.io.IOException) 2
UnsupportedEncodingException (java.io.UnsupportedEncodingException) 2
ArchiveStreamFactory (org.apache.commons.compress.archivers.ArchiveStreamFactory) 2
MaxLengthExceededException (org.codelibs.fess.crawler.exception.MaxLengthExceededException) 2
TikaExtractor (org.codelibs.fess.crawler.extractor.impl.TikaExtractor) 2
FessSystemException (org.codelibs.fess.exception.FessSystemException) 2
BufferedInputStream (java.io.BufferedInputStream) 1
FileOutputStream (java.io.FileOutputStream) 1
ParseException (java.text.ParseException) 1