Use of com.tyndalehouse.step.core.data.EntityDoc in project step by STEPBible.
Class JSwordStrongNumberHelper, method readDataFromLexicon.
/**
 * Reads data from the lexicon for the given strong numbers and records the
 * resulting suggestions against the verse reference.
 *
 * @param reader the lexicon entity index reader
 * @param verseRef the verse reference
 * @param augmentedStrongNumbers the space-separated augmented strong numbers
 * @param userLanguage the user's language code, used to pick a localised gloss
 */
private void readDataFromLexicon(final EntityIndexReader reader, final String verseRef, final String augmentedStrongNumbers, final String userLanguage) {
    final EntityDoc[] docs = reader.search("strongNumber", augmentedStrongNumbers);
    final List<LexiconSuggestion> verseSuggestions = new ArrayList<>();
    Map<String, LexiconSuggestion> suggestionsFromSearch = new HashMap<>(docs.length * 2);
    for (final EntityDoc d : docs) {
        final LexiconSuggestion ls = new LexiconSuggestion();
        ls.setStrongNumber(d.get("strongNumber"));
        ls.setGloss(d.get("stepGloss"));
        if (userLanguage.equalsIgnoreCase("es")) {
            ls.set_es_Gloss(d.get("es_Gloss"));
        } else if (userLanguage.equalsIgnoreCase("zh")) {
            ls.set_zh_Gloss(d.get("zh_Gloss"));
        } else if (userLanguage.equalsIgnoreCase("zh_tw")) {
            ls.set_zh_tw_Gloss(d.get("zh_tw_Gloss"));
        }
        ls.setMatchingForm(d.get("accentedUnicode"));
        ls.setStepTransliteration(d.get("stepTransliteration"));
        suggestionsFromSearch.put(ls.getStrongNumber(), ls);
        this.allStrongs.put(ls.getStrongNumber(), new BookAndBibleCount());
    }
    String[] strongs = StringUtils.split(augmentedStrongNumbers);
    for (String s : strongs) {
        verseSuggestions.add(suggestionsFromSearch.get(s));
    }
    this.verseStrongs.put(verseRef, verseSuggestions);
}
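The search returns the EntityDoc hits in index order rather than in verse order, so the method keys each suggestion by its strong number and then rebuilds the list in the order the strongs occur in the verse. A minimal, self-contained sketch of that keyed-lookup-then-reorder pattern (plain Java; the strong numbers and glosses are illustrative stand-ins, not pulled from the STEP index):

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

public class OrderedLookupSketch {
    public static void main(String[] args) {
        // unordered "search results": strong number -> gloss
        Map<String, String> byStrong = new HashMap<>();
        byStrong.put("G2316", "God");
        byStrong.put("G3056", "word");
        byStrong.put("G0746", "beginning");

        // order in which the strongs occur in the verse
        String[] verseOrder = {"G0746", "G3056", "G2316"};

        // rebuild the list in verse order, as readDataFromLexicon does
        List<String> verseSuggestions = new ArrayList<>();
        for (String s : verseOrder) {
            verseSuggestions.add(byStrong.get(s)); // may add null if the lexicon has no entry
        }
        System.out.println(verseSuggestions); // [beginning, word, God]
    }
}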
Use of com.tyndalehouse.step.core.data.EntityDoc in project step by STEPBible.
Class JSwordStrongNumberHelper, method calculateCounts.
/**
 * Calculates counts for a particular key.
 *
 * @param userLanguage the user's language code, passed through to the lexicon lookup
 */
private void calculateCounts(String userLanguage) {
    try {
        // is key OT or NT
        final BibleBook book = this.reference.getBook();
        this.isOT = DivisionName.OLD_TESTAMENT.contains(book);
        final Versification targetVersification = isOT ? otV11n : ntV11n;
        final Key key = VersificationsMapper.instance().mapVerse(this.reference, targetVersification);
        this.verseStrongs = new TreeMap<>();
        this.allStrongs = new HashMap<>(256);
        final Book preferredCountBook = getPreferredCountBook(this.isOT);
        final List<Element> elements = JSwordUtils.getOsisElements(new BookData(preferredCountBook, key));
        Map<String, EntityDoc> augmentedReferences = new HashMap<>(16);
        for (final Element e : elements) {
            final String verseRef = e.getAttributeValue(OSISUtil.OSIS_ATTR_OSISID);
            final String strongsNumbers = OSISUtil.getStrongsNumbers(e);
            if (StringUtils.isBlank(strongsNumbers)) {
                LOG.warn("Attempting to search for 'no strongs' in verse [{}]", verseRef);
                return;
            }
            final String strongQuery = StringConversionUtils.getStrongPaddedKey(strongsNumbers);
            final StrongAugmentationService.AugmentedStrongs augmentedStrongs = strongAugmentationService.augment(preferredCountBook.getInitials(), verseRef, strongQuery);
            final String augmentedStrongNumbers = StringUtils.join(augmentedStrongs.getStrongList(), ' ');
            readDataFromLexicon(this.definitions, verseRef, augmentedStrongNumbers, userLanguage);
            // build references that apply to each augmented strong number
            final EntityDoc[] entityDocs = augmentedStrongs.getEntityDocs();
            for (EntityDoc ed : entityDocs) {
                final String augmentedStrong = ed.get("augmentedStrong");
                augmentedReferences.put(augmentedStrong, ed);
            }
        }
        // now get counts in the relevant portion of text
        applySearchCounts(getBookFromKey(key), augmentedReferences);
    } catch (final NoSuchKeyException ex) {
        LOG.warn("Unable to enhance verse numbers.", ex);
    } catch (final BookException ex) {
        LOG.warn("Unable to enhance verse number", ex);
    }
}
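The OT/NT split at the top of the method drives everything else: it selects the versification used to map the verse and the book used for counting. The test itself is plain JSword. A small sketch, assuming the standard org.crosswire.jsword.versification package:

import org.crosswire.jsword.versification.BibleBook;
import org.crosswire.jsword.versification.DivisionName;

public class OtNtCheckSketch {
    public static void main(String[] args) {
        // the same test calculateCounts uses to choose between otV11n and ntV11n
        System.out.println(DivisionName.OLD_TESTAMENT.contains(BibleBook.GEN));  // true
        System.out.println(DivisionName.OLD_TESTAMENT.contains(BibleBook.MATT)); // false
    }
}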
Use of com.tyndalehouse.step.core.data.EntityDoc in project step by STEPBible.
Class AbstractAncientSuggestionServiceImpl, method collectNonExactMatches.
@Override
public EntityDoc[] collectNonExactMatches(final TopFieldCollector collector, final SuggestionContext context, final EntityDoc[] alreadyRetrieved, final int leftToCollect) {
    if (context.getInput().indexOf(' ') != -1) {
        return new EntityDoc[0];
    }
    final BooleanQuery query = this.getQuery(context.getInput(), false);
    if (alreadyRetrieved != null) {
        for (EntityDoc doc : alreadyRetrieved) {
            // exclude docs that have already been retrieved
            query.add(new TermQuery(new Term("strongNumber", doc.get("strongNumber"))), BooleanClause.Occur.MUST_NOT);
        }
    }
    final EntityDoc[] search = this.reader.search(query, this.filter, collector);
    // we're interested in the results if we wanted more, or if we're retrieving a single result
    // (because we don't want to display grouping for a lone hit)
    if (leftToCollect > 0 || collector.getTotalHits() == 1) {
        return search;
    }
    // not really interested in the results themselves, just the count
    return new EntityDoc[0];
}
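The exclusion is achieved by appending one MUST_NOT clause per previously retrieved strong number to the mutable BooleanQuery that getQuery returns. A hedged sketch of the same pattern against an older (3.x-style) Lucene API, which is what the mutable add call implies; getQuery is not shown in this excerpt, so the positive prefix clause below is an assumption:

import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.BooleanQuery;
import org.apache.lucene.search.PrefixQuery;
import org.apache.lucene.search.TermQuery;

public class ExclusionQuerySketch {
    public static void main(String[] args) {
        // positive clause: a stand-in for whatever getQuery(input, false) actually builds
        BooleanQuery query = new BooleanQuery();
        query.add(new PrefixQuery(new Term("stepTransliteration", "log")), BooleanClause.Occur.MUST);

        // exclude strong numbers that earlier (exact-match) passes already returned
        for (String alreadySeen : new String[]{"G3056", "G3004"}) {
            query.add(new TermQuery(new Term("strongNumber", alreadySeen)), BooleanClause.Occur.MUST_NOT);
        }

        // prints roughly: +stepTransliteration:log* -strongNumber:G3056 -strongNumber:G3004
        System.out.println(query);
    }
}

In Lucene 5 and later the equivalent would go through BooleanQuery.Builder, since BooleanQuery itself became immutable.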
Use of com.tyndalehouse.step.core.data.EntityDoc in project step by STEPBible.
Class VocabularyServiceImpl, method reOrder.
/**
 * Re-orders the definitions based on the input order.
 *
 * @param strongList the ordered list of strong numbers
 * @param strongDefs the definitions that have been found
 * @return the definitions re-ordered to match strongList
 */
private EntityDoc[] reOrder(final String[] strongList, final EntityDoc[] strongDefs) {
    final Map<String, EntityDoc> entitiesByStrong = new HashMap<>(strongList.length * 2);
    for (final EntityDoc def : strongDefs) {
        entitiesByStrong.put(def.get("strongNumber"), def);
    }
    final EntityDoc[] results = new EntityDoc[strongDefs.length];
    int current = 0;
    for (final String strong : strongList) {
        final EntityDoc entityDoc = entitiesByStrong.get(strong);
        if (entityDoc != null) {
            results[current++] = entityDoc;
        }
    }
    return results;
}
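Because the results array is sized by strongDefs.length, any definition whose strong number is not in strongList leaves a trailing null slot. A simplified stand-in (String values instead of EntityDoc, purely illustrative data) showing the re-ordering and that caveat:

import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;

public class ReOrderSketch {
    public static void main(String[] args) {
        // definitions as they came back from the index, keyed by strong number
        Map<String, String> byStrong = new HashMap<>();
        byStrong.put("G3056", "logos");
        byStrong.put("G2316", "theos");
        byStrong.put("G0026", "agape");

        // requested order; G0026 was indexed but is not requested here
        String[] strongList = {"G2316", "G3056"};

        String[] results = new String[byStrong.size()];
        int current = 0;
        for (String strong : strongList) {
            String def = byStrong.get(strong);
            if (def != null) {
                results[current++] = def;
            }
        }
        System.out.println(Arrays.toString(results)); // [theos, logos, null]
    }
}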
Use of com.tyndalehouse.step.core.data.EntityDoc in project step by STEPBible.
Class VocabularyServiceImpl, method readRelatedWords.
/**
 * Reads related words, i.e. all the words listed in the related-numbers field of each definition.
 *
 * @param defs the definitions that have been looked up
 * @param userLanguage the user's language code, used when converting each related entry to a suggestion
 * @return a map from each strong number to the suggestions for its related words
 */
private Map<String, List<LexiconSuggestion>> readRelatedWords(final EntityDoc[] defs, final String userLanguage) {
    // this map keys each original word's strong number to all of its related suggestions
    final Map<String, SortedSet<LexiconSuggestion>> relatedWords = new HashMap<>(defs.length * 2);
    // to avoid doing lookups twice, we also key each short definition by its own code
    final Map<String, LexiconSuggestion> lookedUpWords = new HashMap<>(defs.length * 2);
    for (final EntityDoc doc : defs) {
        final String sourceNumber = doc.get("strongNumber");
        final String relatedWordNumbers = doc.get("relatedNumbers");
        final String[] allRelatedWords = split(relatedWordNumbers, "[ ,]+");
        for (final String relatedWord : allRelatedWords) {
            LexiconSuggestion shortLexiconDefinition = lookedUpWords.get(relatedWord);
            // look up the related word from the index
            if (shortLexiconDefinition == null) {
                final EntityDoc[] relatedDoc = this.definitions.searchUniqueBySingleField("strongNumber", userLanguage, relatedWord);
                // assume the first doc is the right one
                if (relatedDoc.length > 0) {
                    shortLexiconDefinition = OriginalWordUtils.convertToSuggestion(relatedDoc[0], userLanguage);
                    lookedUpWords.put(relatedWord, shortLexiconDefinition);
                }
            }
            // store as a link to its source number
            if (shortLexiconDefinition != null) {
                SortedSet<LexiconSuggestion> associatedNumbersSoFar = relatedWords.get(sourceNumber);
                if (associatedNumbersSoFar == null) {
                    associatedNumbersSoFar = new TreeSet<>(SortingUtils.LEXICON_SUGGESTION_COMPARATOR);
                    relatedWords.put(sourceNumber, associatedNumbersSoFar);
                }
                associatedNumbersSoFar.add(shortLexiconDefinition);
            }
        }
    }
    return convertToListMap(relatedWords);
}
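convertToListMap is referenced but not shown in this excerpt. A plausible sketch, assuming it simply copies each sorted set into a list while preserving the comparator order; the real STEP implementation may differ:

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.SortedSet;

final class ListMapSketch {
    // hypothetical equivalent of the convertToListMap helper used above
    static <K, V> Map<K, List<V>> convertToListMap(final Map<K, SortedSet<V>> source) {
        final Map<K, List<V>> result = new HashMap<>(source.size() * 2);
        for (final Map.Entry<K, SortedSet<V>> entry : source.entrySet()) {
            // copying each sorted set into an ArrayList keeps its ordering
            result.put(entry.getKey(), new ArrayList<>(entry.getValue()));
        }
        return result;
    }
}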