Example 26 with EntityDoc

use of com.tyndalehouse.step.core.data.EntityDoc in project step by STEPBible.

the class SearchServiceImpl method rebuildSearchResults.

/**
 * Takes a new order and rebuilds a list of search results
 *
 * @param lexiconDefinitions the strong numbers, ordered
 * @param keyedOrder         the set of results to be re-ordered
 * @return a new list of results, now ordered
 */
private List<LexicalSearchEntry> rebuildSearchResults(final List<EntityDoc> lexiconDefinitions, final Map<String, List<LexicalSearchEntry>> keyedOrder) {
    final List<LexicalSearchEntry> newOrder = new ArrayList<LexicalSearchEntry>();
    for (final EntityDoc def : lexiconDefinitions) {
        final List<LexicalSearchEntry> list = keyedOrder.get(def.get(STRONG_NUMBER_FIELD));
        if (list != null) {
            newOrder.addAll(list);
            for (final LexicalSearchEntry e : list) {
                e.setStepGloss(def.get("stepGloss"));
                e.setStepTransliteration(def.get("stepTransliteration"));
                e.setAccentedUnicode(def.get("accentedUnicode"));
                e.setStrongNumber(def.get("strongNumber"));
            }
        }
    }
    return newOrder;
}
Also used : LexicalSearchEntry(com.tyndalehouse.step.core.models.search.LexicalSearchEntry) ArrayList(java.util.ArrayList) EntityDoc(com.tyndalehouse.step.core.data.EntityDoc)
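
For orientation, the pattern above reduces to a small standalone sketch: walk the ordered keys and pull the matching buckets out of the keyed map. Everything below (class name, key and result values) is illustrative rather than part of the STEP codebase.

import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class ReorderSketch {
    /** Rebuilds a flat result list following the order of the given keys. */
    static List<String> rebuild(final List<String> orderedKeys, final Map<String, List<String>> keyedResults) {
        final List<String> newOrder = new ArrayList<String>();
        for (final String key : orderedKeys) {
            final List<String> bucket = keyedResults.get(key);
            if (bucket != null) {
                // keys with no results are simply skipped, as in rebuildSearchResults
                newOrder.addAll(bucket);
            }
        }
        return newOrder;
    }

    public static void main(String[] args) {
        final Map<String, List<String>> keyed = new HashMap<String, List<String>>();
        keyed.put("G0025", Arrays.asList("result-1", "result-2"));
        keyed.put("G0026", Arrays.asList("result-3"));
        // the ordered strong numbers drive the final ordering of the results
        System.out.println(rebuild(Arrays.asList("G0026", "G0025"), keyed));
    }
}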

Example 27 with EntityDoc

use of com.tyndalehouse.step.core.data.EntityDoc in project step by STEPBible.

the class SearchServiceImpl method findByTransliteration.

/**
 * Runs the transliteration rules on the input in an attempt to match an entry in the lexicon
 *
 * @param query   the query to be found
 * @param isGreek true to indicate Greek, false to indicate Hebrew
 * @return the strongs that have been found/matched.
 */
private Set<String> findByTransliteration(final String query, final boolean isGreek) {
    // first find by transliterations that we have
    final String lowerQuery = query.toLowerCase(Locale.ENGLISH);
    final String simplifiedTransliteration = OriginalWordSuggestionServiceImpl.getSimplifiedTransliterationClause(isGreek, lowerQuery, false);
    final EntityDoc[] specificFormEntities = this.specificForms.searchSingleColumn("simplifiedTransliteration", simplifiedTransliteration, getFilter(isGreek));
    // if any specific forms matched, collect their strong numbers and return them straight away
    if (specificFormEntities.length != 0) {
        final Set<String> strongs = new HashSet<String>(specificFormEntities.length);
        for (final EntityDoc f : specificFormEntities) {
            strongs.add(f.get(STRONG_NUMBER_FIELD));
        }
        return strongs;
    }
    final MultiFieldQueryParser queryParser = new MultiFieldQueryParser(Version.LUCENE_30, new String[] { "simplifiedTransliteration", "stepTransliteration", "otherTransliteration" }, this.definitions.getAnalyzer());
    try {
        final Query luceneQuery = queryParser.parse("-stopWord:true " + lowerQuery);
        final EntityDoc[] results = this.definitions.search(luceneQuery);
        if (results.length == 0) {
            throw new AbortQueryException("No definitions found for input");
        }
        final Set<String> strongs = new HashSet<String>(results.length);
        for (final EntityDoc d : results) {
            strongs.add(d.get(STRONG_NUMBER_FIELD));
        }
        return strongs;
    } catch (final ParseException e) {
        throw new TranslatedException(e, "search_invalid");
    }
}
Also used : MultiFieldQueryParser(org.apache.lucene.queryParser.MultiFieldQueryParser) Query(org.apache.lucene.search.Query) SearchQuery(com.tyndalehouse.step.core.service.impl.SearchQuery) TranslatedException(com.tyndalehouse.step.core.exceptions.TranslatedException) EntityDoc(com.tyndalehouse.step.core.data.EntityDoc) ParseException(org.apache.lucene.queryParser.ParseException) HashSet(java.util.HashSet) AbortQueryException(com.tyndalehouse.step.core.service.impl.AbortQueryException)
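
The fallback branch leans on Lucene 3.0's MultiFieldQueryParser. Below is a minimal sketch of just that parsing step; the three field names are copied from the example, while the class name and the sample query term are illustrative. Note how the leading "-stopWord:true" clause excludes entries flagged as stop words.

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.queryParser.MultiFieldQueryParser;
import org.apache.lucene.queryParser.ParseException;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.Version;

public class TransliterationQuerySketch {
    public static void main(String[] args) throws ParseException {
        // same three transliteration fields as in findByTransliteration
        final String[] fields = { "simplifiedTransliteration", "stepTransliteration", "otherTransliteration" };
        final MultiFieldQueryParser parser =
                new MultiFieldQueryParser(Version.LUCENE_30, fields, new StandardAnalyzer(Version.LUCENE_30));
        // the leading clause excludes entries marked as stop words, as in the example above
        final Query query = parser.parse("-stopWord:true agapao");
        System.out.println(query);
    }
}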

Example 28 with EntityDoc

use of com.tyndalehouse.step.core.data.EntityDoc in project step by STEPBible.

the class OriginalWordSuggestionServiceImpl method convertToSuggestionFromSpecificForm.

/**
 * @param specificForm the specific form to be converted
 * @return the suggestion
 */
private LexiconSuggestion convertToSuggestionFromSpecificForm(final EntityDoc specificForm) {
    final String strongNumber = specificForm.get(STRONG_NUMBER_FIELD);
    final EntityDoc[] results = this.definitions.searchExactTermBySingleField(STRONG_NUMBER_FIELD, 1, strongNumber);
    if (results.length > 0) {
        final LexiconSuggestion suggestion = new LexiconSuggestion();
        suggestion.setStrongNumber(strongNumber);
        suggestion.setGloss(results[0].get("stepGloss"));
        suggestion.setMatchingForm(specificForm.get("accentedUnicode"));
        suggestion.setStepTransliteration(specificForm.get("stepTransliteration"));
        return suggestion;
    }
    return null;
}
Also used : LexiconSuggestion(com.tyndalehouse.step.core.models.LexiconSuggestion) EntityDoc(com.tyndalehouse.step.core.data.EntityDoc)
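
The guard-then-map pattern here is small enough to sketch on its own. In the sketch below a Map stands in for EntityDoc and the Suggestion class is a hypothetical stand-in for LexiconSuggestion; the field names mirror the example.

import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class SuggestionSketch {
    /** Hypothetical stand-in for LexiconSuggestion. */
    static class Suggestion {
        String strongNumber;
        String gloss;
        String matchingForm;
        String transliteration;
    }

    /** Builds a suggestion only when a matching definition exists, otherwise returns null. */
    static Suggestion convert(final Map<String, String> specificForm, final List<Map<String, String>> definitionHits) {
        if (definitionHits.isEmpty()) {
            return null;
        }
        final Suggestion s = new Suggestion();
        s.strongNumber = specificForm.get("strongNumber");
        // the gloss comes from the first matching definition document
        s.gloss = definitionHits.get(0).get("stepGloss");
        // the displayed form and transliteration come from the specific form itself
        s.matchingForm = specificForm.get("accentedUnicode");
        s.transliteration = specificForm.get("stepTransliteration");
        return s;
    }

    public static void main(String[] args) {
        final Map<String, String> form = new HashMap<String, String>();
        form.put("strongNumber", "G0025");
        form.put("accentedUnicode", "ἀγαπάω");
        form.put("stepTransliteration", "agapaō");
        final Map<String, String> definition = Collections.singletonMap("stepGloss", "to love");
        final Suggestion s = convert(form, Collections.singletonList(definition));
        System.out.println(s.strongNumber + " / " + s.gloss + " / " + s.matchingForm + " / " + s.transliteration);
    }
}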

Example 29 with EntityDoc

use of com.tyndalehouse.step.core.data.EntityDoc in project step by STEPBible.

the class JSwordStrongNumberHelper method calculateStrongArrayCounts.

/**
 * Calculates counts for an array of Strong numbers.
 */
public PassageStat calculateStrongArrayCounts(final String version, PassageStat stat, final String userLanguage) {
    Map<String, Integer[]> result = new HashMap<String, Integer[]>(128);
    this.isOT = DivisionName.OLD_TESTAMENT.contains(this.reference.getBook());
    final Versification targetVersification = isOT ? otV11n : ntV11n;
    final Key key = VersificationsMapper.instance().mapVerse(this.reference, targetVersification);
    this.allStrongs = new HashMap<>(256);
    Map<String, Integer[]> temp = stat.getStats();
    temp.forEach((strongNum, freq) -> this.allStrongs.put(strongNum, new BookAndBibleCount()));
    Map<String, EntityDoc> augmentedReferences = new HashMap<>(0);
    // now get counts in the relevant portion of text
    applySearchCounts(getBookFromKey(key), augmentedReferences);
    temp.forEach((strongNum, freq) -> {
        BookAndBibleCount bBCount = this.allStrongs.get(strongNum);
        result.put(strongNum, new Integer[] { freq[0], bBCount.getBook(), bBCount.getBible() });
    });
    stat.setStats(result);
    return stat;
}
Also used : HashMap(java.util.HashMap) EntityDoc(com.tyndalehouse.step.core.data.EntityDoc) Versification(org.crosswire.jsword.versification.Versification) BookAndBibleCount(com.tyndalehouse.step.core.models.search.BookAndBibleCount) Key(org.crosswire.jsword.passage.Key)
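
The method is essentially a seed-then-merge over maps: seed an empty count per strong number, let applySearchCounts fill the counts in from the index, then merge passage frequency, book count and Bible count into one array. A condensed sketch of that flow, with the STEP-specific types replaced by simple placeholders and hard-coded sample counts, might look like this:

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

public class StrongCountSketch {
    /** Placeholder for BookAndBibleCount: occurrences within one book vs the whole Bible. */
    static class Counts { int book; int bible; }

    public static void main(String[] args) {
        // incoming stats: strong number -> { frequency within the passage }
        final Map<String, Integer[]> passageStats = new HashMap<String, Integer[]>();
        passageStats.put("G0025", new Integer[] { 3 });

        // 1. seed an empty count for every strong number in the passage
        final Map<String, Counts> allStrongs = new HashMap<String, Counts>();
        passageStats.forEach((strong, freq) -> allStrongs.put(strong, new Counts()));

        // 2. a counting pass (applySearchCounts in the example) would fill these in from the index
        allStrongs.get("G0025").book = 7;
        allStrongs.get("G0025").bible = 106;

        // 3. merge: passage frequency, book count, Bible count
        final Map<String, Integer[]> result = new HashMap<String, Integer[]>();
        passageStats.forEach((strong, freq) -> {
            final Counts c = allStrongs.get(strong);
            result.put(strong, new Integer[] { freq[0], c.book, c.bible });
        });
        System.out.println(Arrays.toString(result.get("G0025")));
    }
}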

Example 30 with EntityDoc

use of com.tyndalehouse.step.core.data.EntityDoc in project step by STEPBible.

the class JSwordStrongNumberHelper method applySearchCounts.

/**
 * Applies the search counts for every strong number.
 *
 * @param bookName the book name
 * @param augmentedByStrong the augmented strongs found in the original augmentation queries
 */
private void applySearchCounts(final String bookName, final Map<String, EntityDoc> augmentedByStrong) {
    try {
        final IndexSearcher is = jSwordSearchService.getIndexSearcher(this.isOT ? STRONG_OT_VERSION_BOOK.getInitials() : STRONG_NT_VERSION_BOOK.getInitials());
        final TermDocs termDocs = is.getIndexReader().termDocs();
        for (final Entry<String, BookAndBibleCount> strong : this.allStrongs.entrySet()) {
            final String strongKey = strong.getKey();
            termDocs.seek(new Term(LuceneIndex.FIELD_STRONG, this.strongAugmentationService.reduce(strongKey)));
            final EntityDoc entityDoc = augmentedByStrong.get(strongKey);
            final String references = entityDoc != null ? entityDoc.get("references") : null;
            // we'll never need more than 200 documents as this is the cut off point
            int bible = 0;
            int book = 0;
            while (termDocs.next()) {
                final int freq = termDocs.freq();
                final Document doc = is.doc(termDocs.doc());
                final String docRef = doc.get(LuceneIndex.FIELD_KEY);
                if ((references == null || augmentedVersionInVerse(docRef, references))) {
                    if (docRef != null && docRef.startsWith(bookName)) {
                        book += freq;
                    }
                    bible += freq;
                }
            }
            final BookAndBibleCount value = strong.getValue();
            value.setBible(bible);
            value.setBook(book);
        }
    } catch (final IOException e) {
        throw new StepInternalException(e.getMessage(), e);
    }
}
Also used : IndexSearcher(org.apache.lucene.search.IndexSearcher) StepInternalException(com.tyndalehouse.step.core.exceptions.StepInternalException) TermDocs(org.apache.lucene.index.TermDocs) EntityDoc(com.tyndalehouse.step.core.data.EntityDoc) BookAndBibleCount(com.tyndalehouse.step.core.models.search.BookAndBibleCount) Term(org.apache.lucene.index.Term) IOException(java.io.IOException) Document(org.apache.lucene.document.Document)
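
The TermDocs walk is the core of the counting. Below is a self-contained sketch of that Lucene 3.0 pattern against a tiny in-memory index; the field names and the sample reference are illustrative, not STEP's LuceneIndex constants.

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermDocs;
import org.apache.lucene.store.RAMDirectory;
import org.apache.lucene.util.Version;

public class TermDocsCountSketch {
    public static void main(String[] args) throws Exception {
        final RAMDirectory directory = new RAMDirectory();
        final IndexWriter writer = new IndexWriter(directory,
                new StandardAnalyzer(Version.LUCENE_30), true, IndexWriter.MaxFieldLength.UNLIMITED);

        // one "verse" document: the strong field is indexed, the key field stores the reference
        final Document verse = new Document();
        verse.add(new Field("strong", "G0025", Field.Store.NO, Field.Index.NOT_ANALYZED));
        verse.add(new Field("strong", "G0025", Field.Store.NO, Field.Index.NOT_ANALYZED)); // occurs twice
        verse.add(new Field("key", "Jhn 3:16", Field.Store.YES, Field.Index.NOT_ANALYZED));
        writer.addDocument(verse);
        writer.close();

        final IndexReader reader = IndexReader.open(directory);
        final TermDocs termDocs = reader.termDocs();
        termDocs.seek(new Term("strong", "G0025"));

        int bible = 0;
        int book = 0;
        while (termDocs.next()) {
            final int freq = termDocs.freq();                 // occurrences within this document
            final String docRef = reader.document(termDocs.doc()).get("key");
            if (docRef != null && docRef.startsWith("Jhn")) { // count towards the book total
                book += freq;
            }
            bible += freq;                                    // always count towards the Bible total
        }
        reader.close();
        System.out.println("book=" + book + ", bible=" + bible);
    }
}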

Aggregations

EntityDoc (com.tyndalehouse.step.core.data.EntityDoc) 39
ArrayList (java.util.ArrayList) 14
HashMap (java.util.HashMap) 6
HashSet (java.util.HashSet) 6
StepInternalException (com.tyndalehouse.step.core.exceptions.StepInternalException) 5
TranslatedException (com.tyndalehouse.step.core.exceptions.TranslatedException) 5
LexiconSuggestion (com.tyndalehouse.step.core.models.LexiconSuggestion) 5
ParseException (org.apache.lucene.queryParser.ParseException) 5
SearchQuery (com.tyndalehouse.step.core.service.impl.SearchQuery) 4
IOException (java.io.IOException) 4
MultiFieldQueryParser (org.apache.lucene.queryParser.MultiFieldQueryParser) 4
Query (org.apache.lucene.search.Query) 4
BookAndBibleCount (com.tyndalehouse.step.core.models.search.BookAndBibleCount) 3
SearchEntry (com.tyndalehouse.step.core.models.search.SearchEntry) 3
SearchResult (com.tyndalehouse.step.core.models.search.SearchResult) 3
QueryParser (org.apache.lucene.queryParser.QueryParser) 3
Key (org.crosswire.jsword.passage.Key) 3
ExpandableSubjectHeadingEntry (com.tyndalehouse.step.core.models.search.ExpandableSubjectHeadingEntry) 2
LexicalSearchEntry (com.tyndalehouse.step.core.models.search.LexicalSearchEntry) 2
SubjectHeadingSearchEntry (com.tyndalehouse.step.core.models.search.SubjectHeadingSearchEntry) 2