Example use of org.hippoecm.hst.core.request.HstRequestContext in the NHS-digital-website "hippo" project.
Class FeedHubComponent, method getFeed.
/**
 * Builds and executes the HST query that backs the feed-hub listing page.
 * <p>
 * The scope folder, document type and date field are derived from the hub's
 * configured feed type ("News", "Events", "Cyber Alerts" or "Supplementary
 * information"); additional constraints come from the request filter values
 * (year/month/type/severity) and the free-text query.
 *
 * @param request current HST request; its context must resolve to a {@link FeedHub} bean
 * @return the matching documents, ordered by the feed's date field per {@code sort}
 * @throws QueryException if the underlying HST query fails
 */
private <T extends HippoBean> List<T> getFeed(HstRequest request) throws QueryException {
    final HstRequestContext context = request.getRequestContext();
    FeedHub feedHub = (FeedHub) context.getContentBean();
    HippoBean folder = feedHub.getParentBean();

    ArrayList<Constraint> constraints = new ArrayList<>();
    if ("Site-wide documents".equalsIgnoreCase(feedHub.getHubType())) {
        // Site-wide hubs search the whole site content tree, not just the hub's folder.
        folder = RequestContextProvider.get().getSiteContentBaseBean();
        if (feedHub.getFeedType().equals("Cyber Alerts")) {
            constraints.add(constraint("website:publicallyaccessible").equalTo(true));
        } else if (!feedHub.getFeedType().equals("Supplementary information")) {
            constraints.add(constraint("website:display").equalTo(true));
        }
    }

    String dateField = "website:publisheddatetime";
    // Parameterised instead of the raw Class; stays null for unrecognised feed types.
    Class<? extends HippoBean> feedClass = null;
    switch (feedHub.getFeedType()) {
        case "News":
            feedClass = News.class;
            if (filterValues.get("year").length > 0) {
                Calendar newsDateFilter = Calendar.getInstance();
                newsDateFilter.set(Calendar.YEAR, Integer.parseInt(filterValues.get("year")[0]));
                newsDateFilter.set(Calendar.DAY_OF_YEAR, 1);
                constraints.add(constraint(dateField).equalTo(newsDateFilter, DateTools.Resolution.YEAR));
            }
            break;
        case "Events":
            feedClass = Event.class;
            // Ordering uses the xpath attribute form while the constraints below use the
            // plain property path. NOTE(review): assumed intentional — confirm both refer
            // to website:events/website:startdatetime.
            dateField = "website:events/@website:startdatetime";
            if (filterValues.get("year").length > 0) {
                Calendar eventsDateFilter = Calendar.getInstance();
                eventsDateFilter.set(Calendar.YEAR, Integer.parseInt(filterValues.get("year")[0]));
                eventsDateFilter.set(Calendar.DAY_OF_YEAR, 1);
                constraints.add(constraint("website:events/website:startdatetime").equalTo(eventsDateFilter, DateTools.Resolution.YEAR));
                // A month filter narrows the same calendar to MONTH resolution; it is only
                // meaningful when a year was supplied, hence the nesting.
                if (filterValues.get("month").length > 0) {
                    Integer month = getMonth(filterValues.get("month")[0]);
                    if (month != null) {
                        eventsDateFilter.set(Calendar.MONTH, month);
                        eventsDateFilter.set(Calendar.DAY_OF_MONTH, 1);
                        constraints.add(constraint("website:events/website:startdatetime").equalTo(eventsDateFilter, DateTools.Resolution.MONTH));
                    }
                }
            }
            if (filterValues.get("type[]").length > 0) {
                for (String type : filterValues.get("type[]")) {
                    constraints.add(constraint("website:type").equalTo(type));
                }
            }
            break;
        case "Cyber Alerts":
            feedClass = CyberAlert.class;
            dateField = "publicationsystem:NominalDate";
            if (filterValues.get("year").length > 0) {
                Calendar cyberAlertsDateFilter = Calendar.getInstance();
                cyberAlertsDateFilter.set(Calendar.YEAR, Integer.parseInt(filterValues.get("year")[0]));
                cyberAlertsDateFilter.set(Calendar.DAY_OF_YEAR, 1);
                constraints.add(constraint(dateField).equalTo(cyberAlertsDateFilter, DateTools.Resolution.YEAR));
            }
            if (filterValues.get("type[]").length > 0) {
                for (String type : filterValues.get("type[]")) {
                    constraints.add(constraint("website:threattype").equalTo(type));
                }
            }
            if (filterValues.get("severity").length > 0) {
                constraints.add(constraint("website:severity").equalTo(filterValues.get("severity")[0]));
            }
            break;
        case "Supplementary information":
            feedClass = SupplementaryInformation.class;
            dateField = "publicationsystem:NominalDate";
            if (filterValues.get("year").length > 0) {
                Calendar supplementaryInfoDateFilter = Calendar.getInstance();
                String year = filterValues.get("year")[0];
                if (year.equals("Unknown")) {
                    // "Unknown" selects documents that have no nominal date at all.
                    constraints.add(constraint(dateField).notExists());
                } else {
                    supplementaryInfoDateFilter.set(Calendar.YEAR, Integer.parseInt(year));
                    supplementaryInfoDateFilter.set(Calendar.DAY_OF_YEAR, 1);
                    DateTools.Resolution dateResolution = DateTools.Resolution.YEAR;
                    if (filterValues.get("month").length > 0) {
                        Integer month = getMonth(filterValues.get("month")[0]);
                        if (month != null) {
                            supplementaryInfoDateFilter.set(Calendar.MONTH, month);
                            supplementaryInfoDateFilter.set(Calendar.DAY_OF_MONTH, 1);
                            dateResolution = DateTools.Resolution.MONTH;
                        }
                    }
                    constraints.add(constraint(dateField).equalTo(supplementaryInfoDateFilter, dateResolution));
                }
            }
            break;
        default:
            // NOTE(review): unrecognised feed types fall through with feedClass == null,
            // so ofTypes(feedClass) below will fail; consider failing fast here instead.
    }

    if (queryText != null && !queryText.isEmpty()) {
        // Free-text search over title OR short summary.
        constraints.add(or(constraint("website:title").contains(queryText), constraint("website:shortsummary").contains(queryText)));
    }

    HstQueryBuilder query = HstQueryBuilder.create(folder);
    query.where(and(constraints.toArray(new Constraint[0]))).ofTypes(feedClass);
    if (sort.equals("date-asc")) {
        query.orderByAscending(dateField);
    } else {
        query.orderByDescending(dateField);
    }
    HippoBeanIterator beanIterator = query.build().execute().getHippoBeans();
    return toList(beanIterator);
}
Example use of org.hippoecm.hst.core.request.HstRequestContext in the NHS-digital-website "hippo" project.
Class FeedListComponent, method executeQuery.
/**
 * Copied from uk.nhs.digital.common.components.EventsComponent, with an added
 * check for the "website:event" doctype. Non-event listings are delegated to
 * {@code super} (org.onehippo.cms7.essentials.components.EssentialsListComponent).
 */
@Override
protected <T extends EssentialsListComponentInfo> Pageable<HippoBean> executeQuery(HstRequest request, T paramInfo, HstQuery query) throws QueryException {
    final FeedListComponentInfo info = getComponentParametersInfo(request);
    final String documentTypes = info.getDocumentTypes();
    // Constant-first equals is null-safe: getDocumentTypes() may be unset on the component.
    if (!"website:event".equals(documentTypes)) {
        return super.executeQuery(request, paramInfo, query);
    }
    int pageSize = this.getPageSize(request, paramInfo);
    int page = this.getCurrentPage(request);
    query.setLimit(pageSize);
    query.setOffset((page - 1) * pageSize);
    this.applyExcludeScopes(request, query, paramInfo);
    this.buildAndApplyFilters(request, query);
    try {
        // The component's query is extended manually because it must also match on the
        // event's interval compound nodes; the interval filter is appended as raw xpath.
        String eventQueryString = query.getQueryAsString(true);
        String queryString = eventQueryString + addIntervalFilter(request);
        HstRequestContext requestContext = request.getRequestContext();
        QueryManager jcrQueryManager = requestContext.getSession().getWorkspace().getQueryManager();
        Query jcrQuery = jcrQueryManager.createQuery(queryString, "xpath");
        QueryResult queryResult = jcrQuery.execute();
        ObjectConverter objectConverter = requestContext.getContentBeansTool().getObjectConverter();
        NodeIterator it = queryResult.getNodes();
        // Hits are interval nodes, so each distinct parent event node is collected,
        // capped at one page of results.
        List<HippoBean> parentNodes = new ArrayList<>();
        List<String> parentPath = new ArrayList<>();
        while (it.hasNext() && parentPath.size() < pageSize) {
            Node interval = it.nextNode();
            Node eventNode = interval.getParent();
            if (eventNode.getPrimaryNodeType().isNodeType("website:event") && !parentPath.contains(eventNode.getPath())) {
                parentPath.add(eventNode.getPath());
                parentNodes.add((HippoBean) objectConverter.getObject(eventNode));
            }
        }
        return this.getPageableFactory().createPageable(parentNodes, page, pageSize);
    } catch (RepositoryException | ObjectBeanManagerException e) {
        // Preserve the underlying exception as the cause instead of only its message.
        throw new QueryException(e.getMessage(), e);
    }
}
Example use of org.hippoecm.hst.core.request.HstRequestContext in the NHS-digital-website "hippo" project.
Class EventsComponent, method executeQuery.
/**
 * Pages through event documents. The HST query is converted to xpath and extended
 * manually so it can match on the events' interval compound nodes; results are then
 * de-duplicated back to their parent event nodes.
 */
@Override
protected <T extends EssentialsListComponentInfo> Pageable<HippoBean> executeQuery(HstRequest request, T paramInfo, HstQuery query) throws QueryException {
    int pageSize = this.getPageSize(request, paramInfo);
    int page = this.getCurrentPage(request);
    query.setLimit(pageSize);
    query.setOffset((page - 1) * pageSize);
    this.applyExcludeScopes(request, query, paramInfo);
    this.buildAndApplyFilters(request, query);
    try {
        // Append the interval-compound filter to the generated xpath query.
        String eventQueryString = query.getQueryAsString(true);
        String queryString = eventQueryString + addIntervalFilter(request);
        HstRequestContext requestContext = request.getRequestContext();
        QueryManager jcrQueryManager = requestContext.getSession().getWorkspace().getQueryManager();
        Query jcrQuery = jcrQueryManager.createQuery(queryString, "xpath");
        QueryResult queryResult = jcrQuery.execute();
        ObjectConverter objectConverter = requestContext.getContentBeansTool().getObjectConverter();
        NodeIterator it = queryResult.getNodes();
        // Hits are interval nodes, so each distinct parent event node is collected,
        // capped at one page of results.
        List<HippoBean> parentNodes = new ArrayList<>();
        List<String> parentPath = new ArrayList<>();
        while (it.hasNext() && parentPath.size() < pageSize) {
            Node interval = it.nextNode();
            Node eventNode = interval.getParent();
            if (eventNode.getPrimaryNodeType().isNodeType("website:event") && !parentPath.contains(eventNode.getPath())) {
                parentPath.add(eventNode.getPath());
                parentNodes.add((HippoBean) objectConverter.getObject(eventNode));
            }
        }
        return this.getPageableFactory().createPageable(parentNodes, page, pageSize);
    } catch (RepositoryException | ObjectBeanManagerException e) {
        // Preserve the underlying exception as the cause instead of only its message.
        throw new QueryException(e.getMessage(), e);
    }
}
Example use of org.hippoecm.hst.core.request.HstRequestContext in the NHS-digital-website "hippo" project.
Class AboutComponent, method doBeforeRender.
/**
 * Exposes the current {@link About} content bean to the template under the
 * request attribute "document".
 */
@Override
public void doBeforeRender(final HstRequest request, final HstResponse response) throws HstComponentException {
    super.doBeforeRender(request, response);
    final About aboutDocument = (About) request.getRequestContext().getContentBean();
    request.setAttribute("document", aboutDocument);
}
Example use of org.hippoecm.hst.core.request.HstRequestContext in the NHS-digital-website "hippo" project.
Class SeriesComponent, method doBeforeRender.
/**
 * Prepares the series landing page model: resolves the Series document (either the
 * content bean itself or the single Series inside a targeted folder), exposes the
 * frequency / date-naming value lists, and splits the series' publications into
 * live and upcoming lists for the template.
 */
@Override
public void doBeforeRender(final HstRequest request, final HstResponse response) throws HstComponentException {
    super.doBeforeRender(request, response);
    final HstRequestContext requestContext = request.getRequestContext();
    final HippoBean contentBean = requestContext.getContentBean();
    final Series seriesIndexDocument;
    if (contentBean.isHippoFolderBean()) {
        // A folder target is only valid when it contains exactly one Series document.
        final List<Series> seriesIndexDocuments = contentBean.getChildBeans(Series.class);
        if (seriesIndexDocuments.size() != 1) {
            reportInvalidTarget(request, contentBean, seriesIndexDocuments.size());
            return;
        }
        seriesIndexDocument = seriesIndexDocuments.get(0);
    } else if (contentBean instanceof Series) {
        seriesIndexDocument = (Series) contentBean;
    } else {
        reportInvalidInvocation(request, contentBean);
        return;
    }

    /* Setting frequency & date naming map on request */
    final ValueList frequencyValueList = SelectionUtil.getValueListByIdentifier("frequency", RequestContextProvider.get());
    if (frequencyValueList != null) {
        request.setAttribute("frequencyMap", SelectionUtil.valueListAsMap(frequencyValueList));
    }
    final ValueList dateNamingConvention = SelectionUtil.getValueListByIdentifier("datenamingconvention", RequestContextProvider.get());
    if (dateNamingConvention != null) {
        request.setAttribute("dateNamingMap", SelectionUtil.valueListAsMap(dateNamingConvention));
    }
    request.setAttribute("series", seriesIndexDocument);

    try {
        final HstQuery query = requestContext.getQueryManager().createQuery(seriesIndexDocument.getParentBean(), Publication.class, LegacyPublication.class);
        query.addOrderByDescending("publicationsystem:NominalDate");
        final HstQueryResult hstQueryResult = query.execute();
        List<PublicationBase> allPublications = toList(hstQueryResult.getHippoBeans());
        Map<Boolean, List<PublicationBase>> publicationByStatus = allPublications.stream().collect(Collectors.groupingBy(PublicationBase::isPubliclyAccessible));
        // groupingBy omits absent keys: default to empty lists so the code below never
        // dereferences null (previously an NPE when one of the groups was missing).
        List<PublicationBase> livePublications = publicationByStatus.getOrDefault(true, new ArrayList<>());
        List<PublicationBase> upcomingPublications = publicationByStatus.getOrDefault(false, new ArrayList<>());
        // Upcoming in reverse date order so the publication closest to now is first.
        if (!isEmpty(upcomingPublications)) {
            Collections.reverse(upcomingPublications);
        }
        if (!seriesIndexDocument.getShowLatest() && !isEmpty(livePublications)) {
            livePublications.sort(DateComparator.COMPARATOR);
        }
        if (!isEmpty(livePublications) && seriesIndexDocument.getShowLatest()) {
            // The first publication is already exposed via Series.latestPublication.
            livePublications.remove(0);
        }
        request.setAttribute("upcomingPublications", upcomingPublications);

        // Interleave past publications with series-replacement events, newest first.
        List<Pair> pastPublicationsAndSeriesChanges = new ArrayList<>();
        for (PublicationBase publicationBase : livePublications) {
            // Diamond instead of the raw Pair constructor (removes unchecked warnings).
            pastPublicationsAndSeriesChanges.add(new Pair<>("publication", publicationBase, publicationBase.getNominalPublicationDateCalendar()));
        }
        if (seriesIndexDocument.getSeriesReplaces() != null) {
            SeriesReplaces seriesReplaces = seriesIndexDocument.getSeriesReplaces();
            if (seriesReplaces.getChangeDate() != null) {
                pastPublicationsAndSeriesChanges.add(new Pair<>("replacedSeries", seriesReplaces, seriesReplaces.getChangeDate().getTime()));
            }
        }
        pastPublicationsAndSeriesChanges.sort(Comparator.comparing(Pair::getDate, Comparator.reverseOrder()));
        request.setAttribute("pastPublicationsAndSeriesChanges", pastPublicationsAndSeriesChanges);
    } catch (QueryException queryException) {
        // Parameterised logging; the throwable goes last so SLF4J logs the stack trace.
        log.error("Failed to find publications for series {}", seriesIndexDocument.getTitle(), queryException);
        reportDisplayError(request, seriesIndexDocument.getTitle());
    }
}
Aggregations