
Example 31 with EntityDoc

use of com.tyndalehouse.step.core.data.EntityDoc in project step by STEPBible.

the class JSwordStrongNumberHelper, method readDataFromLexicon.

/**
 * Read data from lexicon.
 *
 * @param reader                 the lexicon index reader
 * @param verseRef               the verse reference
 * @param augmentedStrongNumbers the augmented strong numbers for the verse
 * @param userLanguage           the user's language, used to pick a translated gloss
 */
private void readDataFromLexicon(final EntityIndexReader reader, final String verseRef, final String augmentedStrongNumbers, final String userLanguage) {
    final EntityDoc[] docs = reader.search("strongNumber", augmentedStrongNumbers);
    final List<LexiconSuggestion> verseSuggestions = new ArrayList<>();
    Map<String, LexiconSuggestion> suggestionsFromSearch = new HashMap<>(docs.length * 2);
    for (final EntityDoc d : docs) {
        final LexiconSuggestion ls = new LexiconSuggestion();
        ls.setStrongNumber(d.get("strongNumber"));
        ls.setGloss(d.get("stepGloss"));
        if (userLanguage.equalsIgnoreCase("es")) {
            ls.set_es_Gloss(d.get("es_Gloss"));
        } else if (userLanguage.equalsIgnoreCase("zh")) {
            ls.set_zh_Gloss(d.get("zh_Gloss"));
        } else if (userLanguage.equalsIgnoreCase("zh_tw")) {
            ls.set_zh_tw_Gloss(d.get("zh_tw_Gloss"));
        }
        ls.setMatchingForm(d.get("accentedUnicode"));
        ls.setStepTransliteration(d.get("stepTransliteration"));
        suggestionsFromSearch.put(ls.getStrongNumber(), ls);
        this.allStrongs.put(ls.getStrongNumber(), new BookAndBibleCount());
    }
    // walk the verse's own strong-number order so the suggestions list matches the verse
    String[] strongs = StringUtils.split(augmentedStrongNumbers);
    for (String s : strongs) {
        verseSuggestions.add(suggestionsFromSearch.get(s));
    }
    this.verseStrongs.put(verseRef, verseSuggestions);
}
Also used : LexiconSuggestion(com.tyndalehouse.step.core.models.LexiconSuggestion), HashMap(java.util.HashMap), EntityDoc(com.tyndalehouse.step.core.data.EntityDoc), ArrayList(java.util.ArrayList), BookAndBibleCount(com.tyndalehouse.step.core.models.search.BookAndBibleCount)
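
The per-language branching above grows by one else-if for every supported gloss column. A minimal table-driven sketch of the same field selection, assuming only the field-naming convention visible above ("es_Gloss", "zh_Gloss", "zh_tw_Gloss"); the class and method names are hypothetical and not part of the STEP codebase:

import java.util.Locale;

public class GlossFieldSketch {

    // Map a user language code to the lexicon field that carries its gloss.
    // Falls back to the English STEP gloss field when no translated column exists.
    static String glossFieldFor(String userLanguage) {
        switch (userLanguage.toLowerCase(Locale.ROOT)) {
            case "es":
                return "es_Gloss";
            case "zh":
                return "zh_Gloss";
            case "zh_tw":
                return "zh_tw_Gloss";
            default:
                return "stepGloss";
        }
    }
}

A caller could then read d.get(glossFieldFor(userLanguage)) once instead of branching per language.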

Example 32 with EntityDoc

use of com.tyndalehouse.step.core.data.EntityDoc in project step by STEPBible.

the class JSwordStrongNumberHelper, method calculateCounts.

/**
 * Calculate counts for a particular key.
 *
 * @param userLanguage the user's language, used to select language-specific glosses
 */
private void calculateCounts(String userLanguage) {
    try {
        // is key OT or NT
        final BibleBook book = this.reference.getBook();
        this.isOT = DivisionName.OLD_TESTAMENT.contains(book);
        final Versification targetVersification = isOT ? otV11n : ntV11n;
        final Key key = VersificationsMapper.instance().mapVerse(this.reference, targetVersification);
        this.verseStrongs = new TreeMap<>();
        this.allStrongs = new HashMap<>(256);
        final Book preferredCountBook = getPreferredCountBook(this.isOT);
        final List<Element> elements = JSwordUtils.getOsisElements(new BookData(preferredCountBook, key));
        Map<String, EntityDoc> augmentedReferences = new HashMap<>(16);
        for (final Element e : elements) {
            final String verseRef = e.getAttributeValue(OSISUtil.OSIS_ATTR_OSISID);
            final String strongsNumbers = OSISUtil.getStrongsNumbers(e);
            if (StringUtils.isBlank(strongsNumbers)) {
                LOG.warn("Attempting to search for 'no strongs' in verse [{}]", verseRef);
                // a verse without Strong's data aborts the whole calculation; applySearchCounts is never reached
                return;
            }
            final String strongQuery = StringConversionUtils.getStrongPaddedKey(strongsNumbers);
            final StrongAugmentationService.AugmentedStrongs augmentedStrongs = strongAugmentationService.augment(preferredCountBook.getInitials(), verseRef, strongQuery);
            final String augmentedStrongNumbers = StringUtils.join(augmentedStrongs.getStrongList(), ' ');
            readDataFromLexicon(this.definitions, verseRef, augmentedStrongNumbers, userLanguage);
            // build references that apply to each augmented strong number
            final EntityDoc[] entityDocs = augmentedStrongs.getEntityDocs();
            for (EntityDoc ed : entityDocs) {
                final String augmentedStrong = ed.get("augmentedStrong");
                augmentedReferences.put(augmentedStrong, ed);
            }
        }
        // now get counts in the relevant portion of text
        applySearchCounts(getBookFromKey(key), augmentedReferences);
    } catch (final NoSuchKeyException ex) {
        LOG.warn("Unable to enhance verse numbers.", ex);
    } catch (final BookException ex) {
        LOG.warn("Unable to enhance verse number", ex);
    }
}
Also used : BibleBook(org.crosswire.jsword.versification.BibleBook), HashMap(java.util.HashMap), StrongAugmentationService(com.tyndalehouse.step.core.service.StrongAugmentationService), Element(org.jdom2.Element), Versification(org.crosswire.jsword.versification.Versification), BookException(org.crosswire.jsword.book.BookException), NoSuchKeyException(org.crosswire.jsword.passage.NoSuchKeyException), Book(org.crosswire.jsword.book.Book), EntityDoc(com.tyndalehouse.step.core.data.EntityDoc), BookData(org.crosswire.jsword.book.BookData), Key(org.crosswire.jsword.passage.Key)
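
The tail of the loop above folds every augmented EntityDoc into a single map keyed by its "augmentedStrong" field, which applySearchCounts then consults. A minimal stand-alone sketch of that accumulation step; the Doc interface and method name are hypothetical stand-ins, not STEP types:

import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class AugmentedIndexSketch {

    // Hypothetical stand-in for EntityDoc: a simple field lookup.
    interface Doc {
        String get(String fieldName);
    }

    // Collect every doc under its augmented strong number; a later verse that
    // yields the same augmented strong simply overwrites the earlier entry.
    static Map<String, Doc> indexByAugmentedStrong(List<Doc[]> docsPerVerse) {
        Map<String, Doc> byAugmentedStrong = new HashMap<>(16);
        for (Doc[] verseDocs : docsPerVerse) {
            for (Doc doc : verseDocs) {
                byAugmentedStrong.put(doc.get("augmentedStrong"), doc);
            }
        }
        return byAugmentedStrong;
    }
}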

Example 33 with EntityDoc

use of com.tyndalehouse.step.core.data.EntityDoc in project step by STEPBible.

the class AbstractAncientSuggestionServiceImpl, method collectNonExactMatches.

@Override
public EntityDoc[] collectNonExactMatches(final TopFieldCollector collector, final SuggestionContext context, final EntityDoc[] alreadyRetrieved, final int leftToCollect) {
    if (context.getInput().indexOf(' ') != -1) {
        return new EntityDoc[0];
    }
    final BooleanQuery query = this.getQuery(context.getInput(), false);
    if (alreadyRetrieved != null) {
        for (EntityDoc doc : alreadyRetrieved) {
            // make sure we don't retrieve docs that have already been retrieved
            query.add(new TermQuery(new Term("strongNumber", doc.get("strongNumber"))), BooleanClause.Occur.MUST_NOT);
        }
    }
    final EntityDoc[] search = this.reader.search(query, this.filter, collector);
    // we're interested in the results if we wanted more, or if we're retrieving a single result (because we don't want to display grouping)
    if (leftToCollect > 0 || collector.getTotalHits() == 1) {
        return search;
    }
    // not really interested, just interested in the count
    return new EntityDoc[0];
}
Also used : BooleanQuery(org.apache.lucene.search.BooleanQuery), TermQuery(org.apache.lucene.search.TermQuery), EntityDoc(com.tyndalehouse.step.core.data.EntityDoc), Term(org.apache.lucene.index.Term)
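
The interesting part here is excluding already-retrieved documents through MUST_NOT clauses. A minimal sketch of that query shape, assuming the mutable Lucene 3.x BooleanQuery API that the project's org.apache.lucene.queryParser imports suggest; the positive prefix clause is illustrative only, since the real clause comes from getQuery(input, false):

import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.PrefixQuery;
import org.apache.lucene.search.TermQuery;

public class ExcludeSeenSketch {

    // Build a query for the user's input while excluding strong numbers that an
    // earlier (exact-match) pass has already returned.
    static BooleanQuery nonExactQueryExcluding(String input, String[] alreadySeenStrongs) {
        BooleanQuery query = new BooleanQuery();
        // illustrative positive clause against a field visible in the lexicon docs above
        query.add(new PrefixQuery(new Term("stepTransliteration", input)), BooleanClause.Occur.MUST);
        for (String strong : alreadySeenStrongs) {
            query.add(new TermQuery(new Term("strongNumber", strong)), BooleanClause.Occur.MUST_NOT);
        }
        return query;
    }
}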

Example 34 with EntityDoc

use of com.tyndalehouse.step.core.data.EntityDoc in project step by STEPBible.

the class VocabularyServiceImpl, method reOrder.

/**
 * Re-orders based on the input.
 *
 * @param strongList the ordered list of strongs
 * @param strongDefs the definitions that have been found
 * @return the entity docs, re-ordered to match strongList
 */
private EntityDoc[] reOrder(final String[] strongList, final EntityDoc[] strongDefs) {
    final Map<String, EntityDoc> entitiesByStrong = new HashMap<>(strongList.length * 2);
    for (final EntityDoc def : strongDefs) {
        entitiesByStrong.put(def.get("strongNumber"), def);
    }
    final EntityDoc[] results = new EntityDoc[strongDefs.length];
    int current = 0;
    for (final String strong : strongList) {
        final EntityDoc entityDoc = entitiesByStrong.get(strong);
        if (entityDoc != null) {
            results[current++] = entityDoc;
        }
    }
    return results;
}
Also used : EntityDoc(com.tyndalehouse.step.core.data.EntityDoc)
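
The same keyed re-ordering can be written once, generically: key the items by some extracted value, then walk the requested order. A minimal generic sketch (not project code) which, like reOrder, silently skips keys that have no matching item:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;

public class ReorderSketch {

    // Return the items rearranged to follow keyOrder; keys with no matching item are skipped.
    static <T> List<T> reorderByKey(String[] keyOrder, List<T> items, Function<T, String> keyOf) {
        Map<String, T> byKey = new HashMap<>(items.size() * 2);
        for (T item : items) {
            byKey.put(keyOf.apply(item), item);
        }
        List<T> ordered = new ArrayList<>(items.size());
        for (String key : keyOrder) {
            T item = byKey.get(key);
            if (item != null) {
                ordered.add(item);
            }
        }
        return ordered;
    }
}

With EntityDoc this would be invoked roughly as reorderByKey(strongList, Arrays.asList(strongDefs), d -> d.get("strongNumber")).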

Example 35 with EntityDoc

use of com.tyndalehouse.step.core.data.EntityDoc in project step by STEPBible.

the class VocabularyServiceImpl, method readRelatedWords.

/**
 * Read related words, i.e. all the words that are in the related numbers fields.
 *
 * @param defs the definitions that have been looked up.
 * @param userLanguage the user's language, used when converting related entries to suggestions
 * @return a map from each strong number to its related-word suggestions
 */
private Map<String, List<LexiconSuggestion>> readRelatedWords(final EntityDoc[] defs, final String userLanguage) {
    // this map keys the original word strong number to all the related codes
    final Map<String, SortedSet<LexiconSuggestion>> relatedWords = new HashMap<String, SortedSet<LexiconSuggestion>>(defs.length * 2);
    // to avoid doing lookups twice, we key each short definition by its code as well
    final Map<String, LexiconSuggestion> lookedUpWords = new HashMap<>(defs.length * 2);
    for (final EntityDoc doc : defs) {
        final String sourceNumber = doc.get("strongNumber");
        final String relatedWordNumbers = doc.get("relatedNumbers");
        final String[] allRelatedWords = split(relatedWordNumbers, "[ ,]+");
        for (final String relatedWord : allRelatedWords) {
            LexiconSuggestion shortLexiconDefinition = lookedUpWords.get(relatedWord);
            // look up related word from index
            if (shortLexiconDefinition == null) {
                final EntityDoc[] relatedDoc = this.definitions.searchUniqueBySingleField("strongNumber", userLanguage, relatedWord);
                // assume first doc
                if (relatedDoc.length > 0) {
                    shortLexiconDefinition = OriginalWordUtils.convertToSuggestion(relatedDoc[0], userLanguage);
                    lookedUpWords.put(relatedWord, shortLexiconDefinition);
                }
            }
            // store as a link to its source number
            if (shortLexiconDefinition != null) {
                SortedSet<LexiconSuggestion> associatedNumbersSoFar = relatedWords.get(sourceNumber);
                if (associatedNumbersSoFar == null) {
                    associatedNumbersSoFar = new TreeSet<>(SortingUtils.LEXICON_SUGGESTION_COMPARATOR);
                    relatedWords.put(sourceNumber, associatedNumbersSoFar);
                }
                associatedNumbersSoFar.add(shortLexiconDefinition);
            }
        }
    }
    return convertToListMap(relatedWords);
}
Also used : LexiconSuggestion(com.tyndalehouse.step.core.models.LexiconSuggestion), EntityDoc(com.tyndalehouse.step.core.data.EntityDoc)
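
The method pairs a small lookup cache (lookedUpWords) with a per-source sorted grouping. Since Java 8 the grouping half can be expressed with computeIfAbsent; a minimal sketch below, where the Suggestion class and the gloss-based comparator are hypothetical stand-ins for LexiconSuggestion and SortingUtils.LEXICON_SUGGESTION_COMPARATOR:

import java.util.Comparator;
import java.util.HashMap;
import java.util.Map;
import java.util.SortedSet;
import java.util.TreeSet;

public class RelatedWordsGroupingSketch {

    // Hypothetical stand-in for LexiconSuggestion, just enough to sort on.
    static class Suggestion {
        final String gloss;
        Suggestion(String gloss) {
            this.gloss = gloss;
        }
    }

    // Stand-in for SortingUtils.LEXICON_SUGGESTION_COMPARATOR.
    private static final Comparator<Suggestion> BY_GLOSS =
            Comparator.comparing((Suggestion s) -> s.gloss);

    private final Map<String, SortedSet<Suggestion>> relatedWords = new HashMap<>();

    // Group a related-word suggestion under its source strong number, keeping each group
    // sorted; computeIfAbsent replaces the explicit null-check in the original.
    void addRelated(String sourceStrong, Suggestion suggestion) {
        relatedWords.computeIfAbsent(sourceStrong, k -> new TreeSet<>(BY_GLOSS))
                    .add(suggestion);
    }
}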

Aggregations

EntityDoc (com.tyndalehouse.step.core.data.EntityDoc): 39
ArrayList (java.util.ArrayList): 14
HashMap (java.util.HashMap): 6
HashSet (java.util.HashSet): 6
StepInternalException (com.tyndalehouse.step.core.exceptions.StepInternalException): 5
TranslatedException (com.tyndalehouse.step.core.exceptions.TranslatedException): 5
LexiconSuggestion (com.tyndalehouse.step.core.models.LexiconSuggestion): 5
ParseException (org.apache.lucene.queryParser.ParseException): 5
SearchQuery (com.tyndalehouse.step.core.service.impl.SearchQuery): 4
IOException (java.io.IOException): 4
MultiFieldQueryParser (org.apache.lucene.queryParser.MultiFieldQueryParser): 4
Query (org.apache.lucene.search.Query): 4
BookAndBibleCount (com.tyndalehouse.step.core.models.search.BookAndBibleCount): 3
SearchEntry (com.tyndalehouse.step.core.models.search.SearchEntry): 3
SearchResult (com.tyndalehouse.step.core.models.search.SearchResult): 3
QueryParser (org.apache.lucene.queryParser.QueryParser): 3
Key (org.crosswire.jsword.passage.Key): 3
ExpandableSubjectHeadingEntry (com.tyndalehouse.step.core.models.search.ExpandableSubjectHeadingEntry): 2
LexicalSearchEntry (com.tyndalehouse.step.core.models.search.LexicalSearchEntry): 2
SubjectHeadingSearchEntry (com.tyndalehouse.step.core.models.search.SubjectHeadingSearchEntry): 2