Example use of org.edamontology.edammap.core.query.Keyword in the edammap project (edamontology): method writeArticle of class Report.
/**
 * Writes one mapped query as an HTML {@code <article>} element.
 * <p>
 * The article consists of a header (rank, query name/ID and previous/next/current
 * navigation links), an optional "query" section with whatever metadata the query
 * has (keywords grouped by type, description, webpage links, documentation links,
 * publications) and finally the "mapping" section with the concept matches.
 *
 * @param args core arguments; fetcher and mapper score settings are read from it
 * @param type query type; {@link QueryType#biotools} queries get a bio.tools link instead of a plain ID
 * @param writer destination for the generated HTML
 * @param concepts EDAM concepts, passed through to writeMatches
 * @param query the query being reported
 * @param queriesSize total number of queries over all pages (controls rank display and "next" logic)
 * @param publications fetched publications of the query
 * @param webpages fetched webpages of the query
 * @param docs fetched documentation webpages of the query
 * @param mapping mapping results for the query
 * @param page 1-based index of the current output page
 * @param nr 1-based global number of this query
 * @param nrMin number of the first query on this page
 * @param nrMax number of the last query on this page
 * @throws IOException if writing to the writer fails
 */
private static void writeArticle(CoreArgs args, QueryType type, Writer writer, Map<EdamUri, Concept> concepts, Query query, int queriesSize, List<Publication> publications, List<Webpage> webpages, List<Webpage> docs, MappingTest mapping, int page, int nr, int nrMin, int nrMax) throws IOException {
	FetcherArgs fetcherArgs = args.getFetcherArgs();

	writer.write("<article>\n");
	writer.write("\t<h2 id=\"" + nr + "\"><span>");
	if (queriesSize > 1) {
		writer.write("<span class=\"rank\">" + nr + ". </span>");
	}
	writer.write("<span>" + (query.getName() != null ? FetcherCommon.escapeHtml(query.getName()) : "") + "</span>");
	if (query.getId() != null) {
		if (type == QueryType.biotools) {
			// bio.tools entries are linked to their page instead of showing the raw ID
			writer.write("<a href=\"" + FetcherCommon.escapeHtmlAttribute(QueryLoader.BIOTOOLS + query.getId()) + "\" class=\"biotools-link\"></a>");
		} else {
			writer.write("<span> (" + FetcherCommon.escapeHtml(query.getId()) + ")</span>");
		}
	}
	writer.write("</span><span>");

	// Navigation: crossing a page boundary links into the adjacent page file,
	// otherwise an in-page fragment link suffices. The first page is plain
	// "index.html" (no number), hence the special case for (page - 1 == 1).
	String next = "";
	if (nr == nrMax && nr < queriesSize) {
		next = "index" + (page + 1) + ".html#" + (nr + 1);
	} else if (nr < nrMax) {
		next = "#" + (nr + 1);
	}
	if (next.isEmpty()) {
		writer.write("<span class=\"next\"></span>");
	} else {
		writer.write("<a href=\"" + next + "\" class=\"next\"></a>");
	}
	String previous = "";
	if (nr == nrMin && nr > 1) {
		previous = "index" + (page - 1 == 1 ? "" : page - 1) + ".html#" + (nr - 1);
	} else if (nr > nrMin) {
		previous = "#" + (nr - 1);
	}
	if (previous.isEmpty()) {
		writer.write("<span class=\"previous\"></span>");
	} else {
		writer.write("<a href=\"" + previous + "\" class=\"previous\"></a>");
	}
	writer.write("<a href=\"#" + nr + "\" class=\"current\"></a>");
	writer.write("</span></h2>\n");

	boolean webpagesPresent = hasNonEmptyUrl(query.getWebpageUrls());
	boolean docsPresent = hasNonEmptyUrl(query.getDocUrls());
	boolean miscPresent = (query.getKeywords() != null && !query.getKeywords().isEmpty()) || (query.getDescription() != null && !query.getDescription().isEmpty()) || webpagesPresent || docsPresent;
	boolean publicationsPresent = false;
	if (query.getPublicationIds() != null) {
		for (PublicationIdsQuery publicationIds : query.getPublicationIds()) {
			if (publicationIds != null && !publicationIds.isEmpty()) {
				publicationsPresent = true;
				break;
			}
		}
	}

	if (miscPresent || publicationsPresent) {
		if (publicationsPresent) {
			writer.write("\t<section class=\"query\">\n");
		} else {
			writer.write("\t<section class=\"query query-no-publications\">\n");
		}
		if (miscPresent) {
			writer.write("\t\t<section class=\"misc\">\n");
			if (query.getKeywords() != null && !query.getKeywords().isEmpty()) {
				// Group keywords by their type, preserving first-seen type order
				Map<String, List<Keyword>> keywords = new LinkedHashMap<>();
				for (Keyword keyword : query.getKeywords()) {
					keywords.computeIfAbsent(keyword.getType(), k -> new ArrayList<>()).add(keyword);
				}
				for (Map.Entry<String, List<Keyword>> entry : keywords.entrySet()) {
					writer.write("\t\t\t<div class=\"generic\">\n");
					writer.write("\t\t\t\t<h3>" + FetcherCommon.escapeHtml(entry.getKey()) + "</h3><br>\n");
					writer.write("\t\t\t\t<div>");
					writer.write(entry.getValue().stream().map(k -> FetcherCommon.getLinkHtml(k.getUrl(), k.getValue())).collect(Collectors.joining("; ")));
					writer.write("</div>\n");
					writer.write("\t\t\t</div>\n");
				}
			}
			if (query.getDescription() != null && !query.getDescription().isEmpty()) {
				writer.write("\t\t\t<div class=\"generic\">\n");
				writer.write("\t\t\t\t<h3>Description</h3><br>\n");
				writer.write("\t\t\t\t<div>" + FetcherCommon.getParagraphsHtml(query.getDescription()) + "</div>\n");
				writer.write("\t\t\t</div>\n");
			}
			if (webpagesPresent) {
				writer.write("\t\t\t<div class=\"links\">\n");
				writer.write("\t\t\t\t<h3>Links</h3><br>\n");
				writer.write("\t\t\t\t<div>\n");
				writeLinks(fetcherArgs, writer, query.getWebpageUrls(), webpages);
				writer.write("\t\t\t\t</div>\n");
				writer.write("\t\t\t</div>\n");
			}
			if (docsPresent) {
				writer.write("\t\t\t<div class=\"links\">\n");
				writer.write("\t\t\t\t<h3>Documentation</h3><br>\n");
				writer.write("\t\t\t\t<div>\n");
				writeLinks(fetcherArgs, writer, query.getDocUrls(), docs);
				writer.write("\t\t\t\t</div>\n");
				writer.write("\t\t\t</div>\n");
			}
			writer.write("\t\t</section>\n");
		}
		if (publicationsPresent) {
			writer.write("\t\t<section class=\"publications\">\n");
			writePublications(fetcherArgs, writer, query.getPublicationIds(), publications);
			writer.write("\t\t</section>\n");
		}
		writer.write("\t</section>\n");
	}

	writer.write("\t<section class=\"mapping\">\n");
	writeMatches(args.getMapperArgs().getScoreArgs(), writer, concepts, query, publications, mapping);
	writer.write("\t</section>\n");
	writer.write("</article>\n\n");
}

/**
 * Returns true if any link in the given list has a non-null, non-empty URL.
 * A null list yields false.
 */
private static boolean hasNonEmptyUrl(List<Link> links) {
	if (links == null) {
		return false;
	}
	for (Link link : links) {
		if (link != null && link.getUrl() != null && !link.getUrl().isEmpty()) {
			return true;
		}
	}
	return false;
}
Example use of org.edamontology.edammap.core.query.Keyword in the edammap project (edamontology): method getProcessedQuery of class Processor.
/**
 * Processes a query for mapping: tokenizes its textual parts (name, keywords,
 * description), fetches and tokenizes its webpages, docs and publications, and
 * computes IDF weights for each token list when an IDF model is given.
 * <p>
 * NOTE(review): for Bioconductor queries this method MUTATES the given query,
 * removing webpage/doc links whose content could not be fetched or tokenized.
 *
 * @param query the query to process; its link lists may be modified (see above)
 * @param type query type; only QueryType.Bioconductor triggers removal of broken links
 * @param pp pre-processor used to tokenize all text
 * @param queryIdf IDF model for weighting tokens, or null to skip IDF computation
 * @param fetcherArgs fetcher settings used for getting webpages, docs and publications
 * @return the processed query with parallel token/IDF lists filled in
 */
public QueryProcessed getProcessedQuery(Query query, QueryType type, PreProcessor pp, Idf queryIdf, FetcherArgs fetcherArgs) {
QueryProcessed queryProcessed = new QueryProcessed();
// Only Bioconductor queries have unfetchable links dropped from the query itself
boolean removeBroken = (type == QueryType.Bioconductor);
if (query.getName() != null) {
List<String> nameTokens = pp.process(query.getName());
// Empty token lists are not stored: name tokens/IDFs stay null in that case
if (!nameTokens.isEmpty()) {
queryProcessed.setNameTokens(nameTokens);
if (queryIdf != null) {
queryProcessed.setNameIdfs(queryIdf.getIdf(nameTokens));
}
}
}
if (query.getKeywords() != null) {
for (Keyword keyword : query.getKeywords()) {
String keywordValue = keyword.getValue();
List<String> keywordTokens = null;
List<Double> keywordIdfs = null;
if (keywordValue != null) {
keywordTokens = pp.process(keywordValue);
if (keywordTokens.isEmpty()) {
keywordTokens = null;
} else if (queryIdf != null) {
keywordIdfs = queryIdf.getIdf(keywordTokens);
}
}
// null entries are added on purpose, to keep these lists index-aligned
// with query.getKeywords()
queryProcessed.addKeywordTokens(keywordTokens);
queryProcessed.addKeywordIdfs(keywordIdfs);
}
}
if (query.getDescription() != null) {
List<String> descriptionTokens = pp.process(query.getDescription());
if (!descriptionTokens.isEmpty()) {
queryProcessed.setDescriptionTokens(descriptionTokens);
if (queryIdf != null) {
queryProcessed.setDescriptionIdfs(queryIdf.getIdf(descriptionTokens));
}
}
}
if (query.getWebpageUrls() != null) {
// Explicit iterator so broken links can be removed from the query in place
for (Iterator<Link> it = query.getWebpageUrls().iterator(); it.hasNext(); ) {
String webpageUrl = it.next().getUrl();
Webpage webpage = FetcherCommon.getWebpage(webpageUrl, database, fetcher, fetcherArgs);
List<String> webpageTokens = null;
List<Double> webpageIdfs = null;
if (webpage != null && webpage.isUsable(fetcherArgs)) {
// Title and content are tokenized together as one text
webpageTokens = pp.process(webpage.getTitle() + " " + webpage.getContent());
if (webpageTokens.isEmpty()) {
webpageTokens = null;
} else if (queryIdf != null) {
webpageIdfs = queryIdf.getIdf(webpageTokens);
}
}
if (webpageTokens == null && removeBroken) {
// Bioconductor: drop the unusable link from the query itself
it.remove();
} else {
// Otherwise keep a (possibly null) entry so the lists stay aligned
// with query.getWebpageUrls()
queryProcessed.addWebpage(webpage);
queryProcessed.addWebpageTokens(webpageTokens);
queryProcessed.addWebpageIdfs(webpageIdfs);
}
}
}
if (query.getDocUrls() != null) {
// Same scheme as for webpages above, but using the doc fetcher/accessors
for (Iterator<Link> it = query.getDocUrls().iterator(); it.hasNext(); ) {
String docUrl = it.next().getUrl();
Webpage doc = FetcherCommon.getDoc(docUrl, database, fetcher, fetcherArgs);
List<String> docTokens = null;
List<Double> docIdfs = null;
if (doc != null && doc.isUsable(fetcherArgs)) {
docTokens = pp.process(doc.getTitle() + " " + doc.getContent());
if (docTokens.isEmpty()) {
docTokens = null;
} else if (queryIdf != null) {
docIdfs = queryIdf.getIdf(docTokens);
}
}
if (docTokens == null && removeBroken) {
it.remove();
} else {
queryProcessed.addDoc(doc);
queryProcessed.addDocTokens(docTokens);
queryProcessed.addDocIdfs(docIdfs);
}
}
}
if (query.getPublicationIds() != null) {
for (PublicationIdsQuery publicationIds : query.getPublicationIds()) {
Publication publication = FetcherCommon.getPublication(publicationIds, database, fetcher, null, fetcherArgs);
if (publication != null) {
queryProcessed.addPublication(publication);
queryProcessed.addProcessedPublication(processPublication(publication, pp, queryIdf, fetcherArgs));
} else {
// Keep null placeholders so indices line up with query.getPublicationIds()
queryProcessed.addPublication(null);
queryProcessed.addProcessedPublication(null);
}
}
}
return queryProcessed;
}
Aggregations