
Example 1 with TaskCancelledException

Use of org.elasticsearch.tasks.TaskCancelledException in project elasticsearch by elastic.

In the class FetchPhase, the method execute:

@Override
public void execute(SearchContext context) {
    final FieldsVisitor fieldsVisitor;
    Set<String> fieldNames = null;
    List<String> fieldNamePatterns = null;
    StoredFieldsContext storedFieldsContext = context.storedFieldsContext();
    if (storedFieldsContext == null) {
        // no fields specified, default to return source if no explicit indication
        if (!context.hasScriptFields() && !context.hasFetchSourceContext()) {
            context.fetchSourceContext(new FetchSourceContext(true));
        }
        fieldsVisitor = new FieldsVisitor(context.sourceRequested());
    } else if (storedFieldsContext.fetchFields() == false) {
        // disable stored fields entirely
        fieldsVisitor = null;
    } else {
        for (String fieldName : context.storedFieldsContext().fieldNames()) {
            if (fieldName.equals(SourceFieldMapper.NAME)) {
                FetchSourceContext fetchSourceContext = context.hasFetchSourceContext() ? context.fetchSourceContext() : FetchSourceContext.FETCH_SOURCE;
                context.fetchSourceContext(new FetchSourceContext(true, fetchSourceContext.includes(), fetchSourceContext.excludes()));
                continue;
            }
            if (Regex.isSimpleMatchPattern(fieldName)) {
                if (fieldNamePatterns == null) {
                    fieldNamePatterns = new ArrayList<>();
                }
                fieldNamePatterns.add(fieldName);
            } else {
                MappedFieldType fieldType = context.smartNameFieldType(fieldName);
                if (fieldType == null) {
                    // Only fail if we know it is an object field; missing paths / fields shouldn't fail.
                    if (context.getObjectMapper(fieldName) != null) {
                        throw new IllegalArgumentException("field [" + fieldName + "] isn't a leaf field");
                    }
                }
                if (fieldNames == null) {
                    fieldNames = new HashSet<>();
                }
                fieldNames.add(fieldName);
            }
        }
        boolean loadSource = context.sourceRequested();
        if (fieldNames == null && fieldNamePatterns == null) {
            // empty list specified, default to disable _source if no explicit indication
            fieldsVisitor = new FieldsVisitor(loadSource);
        } else {
            fieldsVisitor = new CustomFieldsVisitor(
                fieldNames == null ? Collections.emptySet() : fieldNames,
                fieldNamePatterns == null ? Collections.emptyList() : fieldNamePatterns,
                loadSource);
        }
    }
    SearchHit[] hits = new SearchHit[context.docIdsToLoadSize()];
    FetchSubPhase.HitContext hitContext = new FetchSubPhase.HitContext();
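    // Load each requested doc id in turn; the context is polled for cancellation before every hit.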
    for (int index = 0; index < context.docIdsToLoadSize(); index++) {
        if (context.isCancelled()) {
            throw new TaskCancelledException("cancelled");
        }
        int docId = context.docIdsToLoad()[context.docIdsToLoadFrom() + index];
        int readerIndex = ReaderUtil.subIndex(docId, context.searcher().getIndexReader().leaves());
        LeafReaderContext subReaderContext = context.searcher().getIndexReader().leaves().get(readerIndex);
        int subDocId = docId - subReaderContext.docBase;
        final SearchHit searchHit;
        try {
            int rootDocId = findRootDocumentIfNested(context, subReaderContext, subDocId);
            if (rootDocId != -1) {
                searchHit = createNestedSearchHit(context, docId, subDocId, rootDocId, fieldNames, fieldNamePatterns, subReaderContext);
            } else {
                searchHit = createSearchHit(context, fieldsVisitor, docId, subDocId, subReaderContext);
            }
        } catch (IOException e) {
            throw ExceptionsHelper.convertToElastic(e);
        }
        hits[index] = searchHit;
        hitContext.reset(searchHit, subReaderContext, subDocId, context.searcher());
        for (FetchSubPhase fetchSubPhase : fetchSubPhases) {
            fetchSubPhase.hitExecute(context, hitContext);
        }
    }
    for (FetchSubPhase fetchSubPhase : fetchSubPhases) {
        fetchSubPhase.hitsExecute(context, hits);
    }
    context.fetchResult().hits(new SearchHits(hits, context.queryResult().topDocs().totalHits, context.queryResult().topDocs().getMaxScore()));
}
Also used: FieldsVisitor(org.elasticsearch.index.fieldvisitor.FieldsVisitor) CustomFieldsVisitor(org.elasticsearch.index.fieldvisitor.CustomFieldsVisitor) SearchHit(org.elasticsearch.search.SearchHit) ArrayList(java.util.ArrayList) IOException(java.io.IOException) FetchSourceContext(org.elasticsearch.search.fetch.subphase.FetchSourceContext) MappedFieldType(org.elasticsearch.index.mapper.MappedFieldType) InnerHitsFetchSubPhase(org.elasticsearch.search.fetch.subphase.InnerHitsFetchSubPhase) LeafReaderContext(org.apache.lucene.index.LeafReaderContext) TaskCancelledException(org.elasticsearch.tasks.TaskCancelledException) SearchHits(org.elasticsearch.search.SearchHits) HashSet(java.util.HashSet)
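
Both examples follow the same cooperative-cancellation pattern: a long-running loop polls the search context between units of work and aborts by throwing TaskCancelledException rather than finishing. The sketch below isolates that pattern outside the FetchPhase internals; it is only an illustration, and CancellationFlag, processAll and process are hypothetical stand-ins for context.isCancelled() and the per-document work. Only the TaskCancelledException constructor is taken from the example above.

import org.elasticsearch.tasks.TaskCancelledException;

import java.util.List;

// Minimal sketch of the cooperative-cancellation pattern shown in FetchPhase.execute.
final class CancellableLoop {

    /** Hypothetical stand-in for SearchContext#isCancelled(). */
    interface CancellationFlag {
        boolean isCancelled();
    }

    static void processAll(List<String> items, CancellationFlag flag) {
        for (String item : items) {
            if (flag.isCancelled()) {
                // same unchecked exception (and message) the fetch and dfs phases throw on cancellation
                throw new TaskCancelledException("cancelled");
            }
            process(item);
        }
    }

    private static void process(String item) {
        // placeholder for the real per-item work, e.g. loading the stored fields of one hit
        System.out.println("processed " + item);
    }
}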

Example 2 with TaskCancelledException

Use of org.elasticsearch.tasks.TaskCancelledException in project elasticsearch by elastic.

In the class DfsPhase, the method execute:

@Override
public void execute(SearchContext context) {
    final ObjectHashSet<Term> termsSet = new ObjectHashSet<>();
    try {
        context.searcher().createNormalizedWeight(context.query(), true).extractTerms(new DelegateSet(termsSet));
        for (RescoreSearchContext rescoreContext : context.rescore()) {
            rescoreContext.rescorer().extractTerms(context, rescoreContext, new DelegateSet(termsSet));
        }
        Term[] terms = termsSet.toArray(Term.class);
        TermStatistics[] termStatistics = new TermStatistics[terms.length];
        IndexReaderContext indexReaderContext = context.searcher().getTopReaderContext();
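        // Compute term statistics for every extracted term, checking for cancellation between terms.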
        for (int i = 0; i < terms.length; i++) {
            if (context.isCancelled()) {
                throw new TaskCancelledException("cancelled");
            }
            // LUCENE 4 UPGRADE: cache TermContext?
            TermContext termContext = TermContext.build(indexReaderContext, terms[i]);
            termStatistics[i] = context.searcher().termStatistics(terms[i], termContext);
        }
        ObjectObjectHashMap<String, CollectionStatistics> fieldStatistics = HppcMaps.newNoNullKeysMap();
        for (Term term : terms) {
            assert term.field() != null : "field is null";
            if (!fieldStatistics.containsKey(term.field())) {
                final CollectionStatistics collectionStatistics = context.searcher().collectionStatistics(term.field());
                fieldStatistics.put(term.field(), collectionStatistics);
                if (context.isCancelled()) {
                    throw new TaskCancelledException("cancelled");
                }
            }
        }
        context.dfsResult().termsStatistics(terms, termStatistics).fieldStatistics(fieldStatistics).maxDoc(context.searcher().getIndexReader().maxDoc());
    } catch (Exception e) {
        throw new DfsPhaseExecutionException(context, "Exception during dfs phase", e);
    } finally {
        // don't hold on to terms
        termsSet.clear();
    }
}
Also used: RescoreSearchContext(org.elasticsearch.search.rescore.RescoreSearchContext) Term(org.apache.lucene.index.Term) TermStatistics(org.apache.lucene.search.TermStatistics) IndexReaderContext(org.apache.lucene.index.IndexReaderContext) TermContext(org.apache.lucene.index.TermContext) SearchContextException(org.elasticsearch.search.SearchContextException) TaskCancelledException(org.elasticsearch.tasks.TaskCancelledException) CollectionStatistics(org.apache.lucene.search.CollectionStatistics) ObjectHashSet(com.carrotsearch.hppc.ObjectHashSet)
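
TaskCancelledException is unchecked (it is thrown above without a throws clause), so a caller that wants to treat a cancelled task differently from a genuine failure has to catch it explicitly. The helper below is a minimal, hypothetical sketch of that idea; runSafely and its Runnable argument are not Elasticsearch API, only the exception type comes from the examples above.

import org.elasticsearch.tasks.TaskCancelledException;

// Hypothetical helper: run a phase and report cancellation separately from real failures.
final class PhaseRunner {

    static void runSafely(Runnable phase) {
        try {
            phase.run();
        } catch (TaskCancelledException e) {
            // the phase observed context.isCancelled() and aborted early; not a data or I/O failure
            System.out.println("search phase cancelled: " + e.getMessage());
        }
    }
}

Note that in the DfsPhase example the cancellation is first caught by the blanket catch (Exception e) and rethrown wrapped in a DfsPhaseExecutionException, so it can also surface as the cause of that wrapper rather than as a bare TaskCancelledException.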

Aggregations

TaskCancelledException (org.elasticsearch.tasks.TaskCancelledException): 2 usages
ObjectHashSet (com.carrotsearch.hppc.ObjectHashSet): 1 usage
IOException (java.io.IOException): 1 usage
ArrayList (java.util.ArrayList): 1 usage
HashSet (java.util.HashSet): 1 usage
IndexReaderContext (org.apache.lucene.index.IndexReaderContext): 1 usage
LeafReaderContext (org.apache.lucene.index.LeafReaderContext): 1 usage
Term (org.apache.lucene.index.Term): 1 usage
TermContext (org.apache.lucene.index.TermContext): 1 usage
CollectionStatistics (org.apache.lucene.search.CollectionStatistics): 1 usage
TermStatistics (org.apache.lucene.search.TermStatistics): 1 usage
CustomFieldsVisitor (org.elasticsearch.index.fieldvisitor.CustomFieldsVisitor): 1 usage
FieldsVisitor (org.elasticsearch.index.fieldvisitor.FieldsVisitor): 1 usage
MappedFieldType (org.elasticsearch.index.mapper.MappedFieldType): 1 usage
SearchContextException (org.elasticsearch.search.SearchContextException): 1 usage
SearchHit (org.elasticsearch.search.SearchHit): 1 usage
SearchHits (org.elasticsearch.search.SearchHits): 1 usage
FetchSourceContext (org.elasticsearch.search.fetch.subphase.FetchSourceContext): 1 usage
InnerHitsFetchSubPhase (org.elasticsearch.search.fetch.subphase.InnerHitsFetchSubPhase): 1 usage
RescoreSearchContext (org.elasticsearch.search.rescore.RescoreSearchContext): 1 usage