Use of com.tyndalehouse.step.core.data.EntityDoc in project step by STEPBible: class SearchServiceImpl, method rebuildSearchResults.
/**
 * Takes the new order and rebuilds the list of search results.
 *
 * @param lexiconDefinitions the strong numbers, in the desired order
 * @param keyedOrder the results to be re-ordered, keyed by strong number
 * @return a new list of results, now ordered
 */
private List<LexicalSearchEntry> rebuildSearchResults(final List<EntityDoc> lexiconDefinitions, final Map<String, List<LexicalSearchEntry>> keyedOrder) {
    final List<LexicalSearchEntry> newOrder = new ArrayList<LexicalSearchEntry>();
    for (final EntityDoc def : lexiconDefinitions) {
        final List<LexicalSearchEntry> list = keyedOrder.get(def.get(STRONG_NUMBER_FIELD));
        if (list != null) {
            newOrder.addAll(list);
            for (final LexicalSearchEntry e : list) {
                e.setStepGloss(def.get("stepGloss"));
                e.setStepTransliteration(def.get("stepTransliteration"));
                e.setAccentedUnicode(def.get("accentedUnicode"));
                e.setStrongNumber(def.get("strongNumber"));
            }
        }
    }
    return newOrder;
}
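The re-ordering depends only on keyedOrder being keyed by the same Strong number field carried by the ordered definitions: results are appended in definition order and enriched with the lexicon fields. Below is a minimal, self-contained sketch of the same pattern, using hypothetical Definition and Entry types in place of EntityDoc and LexicalSearchEntry:

import java.util.ArrayList;
import java.util.List;
import java.util.Map;

public class ReorderSketch {
    // hypothetical stand-ins for EntityDoc and LexicalSearchEntry
    static class Definition {
        final String strongNumber;
        final String gloss;
        Definition(final String strongNumber, final String gloss) {
            this.strongNumber = strongNumber;
            this.gloss = gloss;
        }
    }

    static class Entry {
        String strongNumber;
        String gloss;
    }

    static List<Entry> rebuild(final List<Definition> orderedDefinitions, final Map<String, List<Entry>> keyedOrder) {
        final List<Entry> newOrder = new ArrayList<Entry>();
        for (final Definition def : orderedDefinitions) {
            final List<Entry> list = keyedOrder.get(def.strongNumber);
            if (list == null) {
                continue; // no results were keyed under this definition
            }
            newOrder.addAll(list); // results come out in definition order
            for (final Entry e : list) {
                // copy the lexicon fields onto each result
                e.strongNumber = def.strongNumber;
                e.gloss = def.gloss;
            }
        }
        return newOrder;
    }
}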
Use of com.tyndalehouse.step.core.data.EntityDoc in project step by STEPBible: class SearchServiceImpl, method findByTransliteration.
/**
* Runs the transliteration rules on the input in an attempt to match an entry in the lexicon
*
* @param query the query to be found
* @param isGreek true to indicate Greek, false to indicate Hebrew
* @return the strongs that have been found/matched.
*/
private Set<String> findByTransliteration(final String query, final boolean isGreek) {
    // first look up the transliterations we already hold in the specific-forms index
    final String lowerQuery = query.toLowerCase(Locale.ENGLISH);
    final String simplifiedTransliteration = OriginalWordSuggestionServiceImpl.getSimplifiedTransliterationClause(isGreek, lowerQuery, false);
    final EntityDoc[] specificFormEntities = this.specificForms.searchSingleColumn("simplifiedTransliteration", simplifiedTransliteration, getFilter(isGreek));
    // if a specific form matched, return its strong numbers directly
    if (specificFormEntities.length != 0) {
        final Set<String> strongs = new HashSet<String>(specificFormEntities.length);
        for (final EntityDoc f : specificFormEntities) {
            strongs.add(f.get(STRONG_NUMBER_FIELD));
        }
        return strongs;
    }
    // otherwise fall back to a multi-field search across the definitions index
    final MultiFieldQueryParser queryParser = new MultiFieldQueryParser(Version.LUCENE_30, new String[] { "simplifiedTransliteration", "stepTransliteration", "otherTransliteration" }, this.definitions.getAnalyzer());
    try {
        final Query luceneQuery = queryParser.parse("-stopWord:true " + lowerQuery);
        final EntityDoc[] results = this.definitions.search(luceneQuery);
        if (results.length == 0) {
            // nothing matched at all, so abort the query
            throw new AbortQueryException("No definitions found for input");
        }
        final Set<String> strongs = new HashSet<String>(results.length);
        for (final EntityDoc d : results) {
            strongs.add(d.get(STRONG_NUMBER_FIELD));
        }
        return strongs;
    } catch (final ParseException e) {
        throw new TranslatedException(e, "search_invalid");
    }
}
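The fallback leans on Lucene's MultiFieldQueryParser: a bare term is expanded across all of the listed transliteration fields, while the leading "-stopWord:true" clause keeps its explicit field and excludes stop-word entries. Below is a small sketch of just that parsing step against Lucene 3.x; the query text "dabar" and the printed output are illustrative only:

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.queryParser.MultiFieldQueryParser;
import org.apache.lucene.queryParser.ParseException;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.Version;

public class TransliterationQuerySketch {
    public static void main(final String[] args) throws ParseException {
        // same parser setup as above: one query expanded over several transliteration fields
        final MultiFieldQueryParser parser = new MultiFieldQueryParser(
                Version.LUCENE_30,
                new String[] { "simplifiedTransliteration", "stepTransliteration", "otherTransliteration" },
                new StandardAnalyzer(Version.LUCENE_30));
        // "-stopWord:true" keeps its explicit field; "dabar" is expanded across the three fields
        final Query query = parser.parse("-stopWord:true dabar");
        System.out.println(query);
        // prints something like:
        // -stopWord:true (simplifiedTransliteration:dabar stepTransliteration:dabar otherTransliteration:dabar)
    }
}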
Use of com.tyndalehouse.step.core.data.EntityDoc in project step by STEPBible: class OriginalWordSuggestionServiceImpl, method convertToSuggestionFromSpecificForm.
/**
* @param specificForm the specific form to be converted
* @return the suggestion
*/
private LexiconSuggestion convertToSuggestionFromSpecificForm(final EntityDoc specificForm) {
    final String strongNumber = specificForm.get(STRONG_NUMBER_FIELD);
    final EntityDoc[] results = this.definitions.searchExactTermBySingleField(STRONG_NUMBER_FIELD, 1, strongNumber);
    if (results.length > 0) {
        final LexiconSuggestion suggestion = new LexiconSuggestion();
        suggestion.setStrongNumber(strongNumber);
        suggestion.setGloss(results[0].get("stepGloss"));
        suggestion.setMatchingForm(specificForm.get("accentedUnicode"));
        suggestion.setStepTransliteration(specificForm.get("stepTransliteration"));
        return suggestion;
    }
    return null;
}
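A typical caller runs this conversion over the array of specific forms returned by a search and drops the nulls (forms whose Strong number no longer resolves to a definition). The helper below, convertAll, is a hypothetical sketch of that calling pattern and is not part of the real class:

// hypothetical helper within the same class; convertToSuggestionFromSpecificForm is private,
// so this sketch assumes it is called from inside OriginalWordSuggestionServiceImpl
private List<LexiconSuggestion> convertAll(final EntityDoc[] specificForms) {
    final List<LexiconSuggestion> suggestions = new ArrayList<LexiconSuggestion>();
    for (final EntityDoc form : specificForms) {
        final LexiconSuggestion suggestion = convertToSuggestionFromSpecificForm(form);
        if (suggestion != null) {
            // forms with no matching definition are skipped
            suggestions.add(suggestion);
        }
    }
    return suggestions;
}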
Use of com.tyndalehouse.step.core.data.EntityDoc in project step by STEPBible: class JSwordStrongNumberHelper, method calculateStrongArrayCounts.
/**
 * Calculates counts for an array of Strong numbers.
 *
 * @param version the version in question
 * @param stat the passage statistics, keyed by Strong number
 * @param userLanguage the user's language
 * @return the same statistics, augmented with book and Bible counts
 */
public PassageStat calculateStrongArrayCounts(final String version, PassageStat stat, final String userLanguage) {
    Map<String, Integer[]> result = new HashMap<String, Integer[]>(128);
    this.isOT = DivisionName.OLD_TESTAMENT.contains(this.reference.getBook());
    final Versification targetVersification = isOT ? otV11n : ntV11n;
    final Key key = VersificationsMapper.instance().mapVerse(this.reference, targetVersification);
    this.allStrongs = new HashMap<>(256);
    Map<String, Integer[]> temp = stat.getStats();
    temp.forEach((strongNum, feq) -> this.allStrongs.put(strongNum, new BookAndBibleCount()));
    Map<String, EntityDoc> augmentedReferences = new HashMap<>(0);
    // now get counts in the relevant portion of text
    applySearchCounts(getBookFromKey(key), augmentedReferences);
    temp.forEach((strongNum, freq) -> {
        BookAndBibleCount bBCount = this.allStrongs.get(strongNum);
        result.put(strongNum, new Integer[] { freq[0], bBCount.getBook(), bBCount.getBible() });
    });
    stat.setStats(result);
    return stat;
}
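Each entry written back into the statistics is a three-element array: the frequency within the requested passage, the count within the containing book, and the count across the whole Bible. The helper below is a hypothetical sketch of how a consumer might unpack that layout; only PassageStat.getStats() from the code above is assumed:

// hypothetical consumer of the Integer[] layout produced above
static void printCounts(final PassageStat stat) {
    for (final Map.Entry<String, Integer[]> entry : stat.getStats().entrySet()) {
        final Integer[] counts = entry.getValue();
        final int inPassage = counts[0]; // occurrences in the requested passage
        final int inBook = counts[1];    // occurrences in the containing book
        final int inBible = counts[2];   // occurrences across the whole Bible
        System.out.println(entry.getKey() + ": " + inPassage + "/" + inBook + "/" + inBible);
    }
}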
Use of com.tyndalehouse.step.core.data.EntityDoc in project step by STEPBible: class JSwordStrongNumberHelper, method applySearchCounts.
/**
 * Applies the search counts for every Strong number.
 *
 * @param bookName the book name
 * @param augmentedByStrong the augmented strongs found in the original augmentation queries
 */
private void applySearchCounts(final String bookName, final Map<String, EntityDoc> augmentedByStrong) {
    try {
        final IndexSearcher is = jSwordSearchService.getIndexSearcher(this.isOT ? STRONG_OT_VERSION_BOOK.getInitials() : STRONG_NT_VERSION_BOOK.getInitials());
        final TermDocs termDocs = is.getIndexReader().termDocs();
        for (final Entry<String, BookAndBibleCount> strong : this.allStrongs.entrySet()) {
            final String strongKey = strong.getKey();
            termDocs.seek(new Term(LuceneIndex.FIELD_STRONG, this.strongAugmentationService.reduce(strongKey)));
            final EntityDoc entityDoc = augmentedByStrong.get(strongKey);
            final String references = entityDoc != null ? entityDoc.get("references") : null;
            int bible = 0;
            int book = 0;
            while (termDocs.next()) {
                final int freq = termDocs.freq();
                final Document doc = is.doc(termDocs.doc());
                final String docRef = doc.get(LuceneIndex.FIELD_KEY);
                if (references == null || augmentedVersionInVerse(docRef, references)) {
                    if (docRef != null && docRef.startsWith(bookName)) {
                        book += freq;
                    }
                    bible += freq;
                }
            }
            final BookAndBibleCount value = strong.getValue();
            value.setBible(bible);
            value.setBook(book);
        }
    } catch (final IOException e) {
        throw new StepInternalException(e.getMessage(), e);
    }
}
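The counting itself is plain Lucene 3.x term enumeration: seek a TermDocs iterator to the term, walk its postings, and sum the per-document frequencies. The sketch below isolates that core step from the STEP-specific classes; the field name, term, and bookName passed in are whatever the caller's index uses:

import java.io.IOException;
import org.apache.lucene.document.Document;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.TermDocs;

public class TermFrequencySketch {
    /** Sums term frequencies across all documents, and separately for documents whose key starts with bookName. */
    static int[] countOccurrences(final IndexReader reader, final Term term,
                                  final String keyField, final String bookName) throws IOException {
        int bible = 0;
        int book = 0;
        final TermDocs termDocs = reader.termDocs();
        termDocs.seek(term);                  // position the iterator on the term (e.g. a strong number)
        while (termDocs.next()) {             // one entry per document (verse) containing the term
            final int freq = termDocs.freq(); // occurrences within that document
            final Document doc = reader.document(termDocs.doc());
            final String docRef = doc.get(keyField);
            if (docRef != null && docRef.startsWith(bookName)) {
                book += freq;
            }
            bible += freq;
        }
        termDocs.close();
        return new int[] { book, bible };
    }
}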