
Example 46 with DataverseRequest

use of edu.harvard.iq.dataverse.engine.command.DataverseRequest in project dataverse by IQSS.

the class SearchFilesServiceBean method getFileView.

public FileView getFileView(DatasetVersion datasetVersion, User user, String userSuppliedQuery) {
    Dataverse dataverse = null;
    List<String> filterQueries = new ArrayList<>();
    filterQueries.add(SearchFields.TYPE + ":" + SearchConstants.FILES);
    filterQueries.add(SearchFields.PARENT_ID + ":" + datasetVersion.getDataset().getId());
    /**
     * @todo In order to support searching for files based on dataset
     * version for https://github.com/IQSS/dataverse/issues/2455 we're going
     * to need to make the dataset version id searchable, perhaps as part of
     * https://github.com/IQSS/dataverse/issues/2038
     */
    // filterQueries.add(SearchFields.DATASET_VERSION_ID + ":" + datasetVersion.getId());
    String finalQuery = SearchUtil.determineFinalQuery(userSuppliedQuery);
    SortBy sortBy = getSortBy(finalQuery);
    String sortField = sortBy.getField();
    String sortOrder = sortBy.getOrder();
    int paginationStart = 0;
    boolean onlyDataRelatedToMe = false;
    int numResultsPerPage = 25;
    SolrQueryResponse solrQueryResponse = null;
    try {
        HttpServletRequest httpServletRequest = (HttpServletRequest) FacesContext.getCurrentInstance().getExternalContext().getRequest();
        solrQueryResponse = searchService.search(new DataverseRequest(user, httpServletRequest), dataverse, finalQuery, filterQueries, sortField, sortOrder, paginationStart, onlyDataRelatedToMe, numResultsPerPage);
    } catch (SearchException ex) {
        logger.info(SearchException.class + " searching for files: " + ex);
        return null;
    } catch (Exception ex) {
        logger.info(Exception.class + " searching for files: " + ex);
        return null;
    }
    return new FileView(solrQueryResponse.getSolrSearchResults(), solrQueryResponse.getFacetCategoryList(), solrQueryResponse.getFilterQueriesActual(), solrQueryResponse.getSolrQuery().getQuery());
}
Also used: ArrayList(java.util.ArrayList) Dataverse(edu.harvard.iq.dataverse.Dataverse) HttpServletRequest(javax.servlet.http.HttpServletRequest) DataverseRequest(edu.harvard.iq.dataverse.engine.command.DataverseRequest)
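
For context, a minimal caller sketch for getFileView: a JSF backing bean that injects SearchFilesServiceBean and requests the file view of a dataset version. The @EJB field and the loadFileView method are hypothetical and only illustrate the call; they are not code from the project.

// Hypothetical caller sketch, not from the Dataverse codebase.
@EJB
SearchFilesServiceBean searchFilesService;

public void loadFileView(DatasetVersion datasetVersion, User user) {
    // Passing null as userSuppliedQuery lets SearchUtil.determineFinalQuery() fall back to its default query.
    FileView fileView = searchFilesService.getFileView(datasetVersion, user, null);
    if (fileView == null) {
        // getFileView() returns null when the underlying Solr search throws (see the catch blocks above).
        return;
    }
    // Render the results, facets, and filter queries carried by fileView in the page.
}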

Example 47 with DataverseRequest

use of edu.harvard.iq.dataverse.engine.command.DataverseRequest in project dataverse by IQSS.

the class SearchIncludeFragment method search.

public void search(boolean onlyDataRelatedToMe) {
    logger.fine("search called");
    // wildcard/browse (*) unless user supplies a query
    String queryToPassToSolr = "*";
    if (this.query == null) {
        mode = browseModeString;
    } else if (this.query.isEmpty()) {
        mode = browseModeString;
    } else {
        mode = searchModeString;
    }
    if (mode.equals(browseModeString)) {
        queryToPassToSolr = "*";
        if (sortField == null) {
            sortField = searchFieldReleaseOrCreateDate;
        }
        if (sortOrder == null) {
            sortOrder = SortOrder.desc;
        }
        if (selectedTypesString == null || selectedTypesString.isEmpty()) {
            selectedTypesString = "dataverses:datasets";
        }
    } else if (mode.equals(searchModeString)) {
        queryToPassToSolr = query;
        if (sortField == null) {
            sortField = searchFieldRelevance;
        }
        if (sortOrder == null) {
            sortOrder = SortOrder.desc;
        }
        if (selectedTypesString == null || selectedTypesString.isEmpty()) {
            selectedTypesString = "dataverses:datasets:files";
        }
    }
    filterQueries = new ArrayList<>();
    for (String fq : Arrays.asList(fq0, fq1, fq2, fq3, fq4, fq5, fq6, fq7, fq8, fq9)) {
        if (fq != null) {
            filterQueries.add(fq);
        }
    }
    SolrQueryResponse solrQueryResponse = null;
    List<String> filterQueriesFinal = new ArrayList<>();
    if (dataverseAlias != null) {
        this.dataverse = dataverseService.findByAlias(dataverseAlias);
    }
    if (this.dataverse != null) {
        dataversePath = dataverseService.determineDataversePath(this.dataverse);
        String filterDownToSubtree = SearchFields.SUBTREE + ":\"" + dataversePath + "\"";
        // logger.info("SUBTREE parameter: " + dataversePath);
        if (!this.dataverse.equals(dataverseService.findRootDataverse())) {
            /**
             * @todo centralize this into SearchServiceBean
             */
            filterQueriesFinal.add(filterDownToSubtree);
        // this.dataverseSubtreeContext = dataversePath;
        } else {
            // this.dataverseSubtreeContext = "all";
            this.setRootDv(true);
        }
    } else {
        this.dataverse = dataverseService.findRootDataverse();
        // this.dataverseSubtreeContext = "all";
        this.setRootDv(true);
    }
    selectedTypesList = new ArrayList<>();
    String[] parts = selectedTypesString.split(":");
    // int count = 0;
    selectedTypesList.addAll(Arrays.asList(parts));
    List<String> filterQueriesFinalAllTypes = new ArrayList<>();
    String[] arr = selectedTypesList.toArray(new String[selectedTypesList.size()]);
    selectedTypesHumanReadable = combine(arr, " OR ");
    if (!selectedTypesHumanReadable.isEmpty()) {
        typeFilterQuery = SearchFields.TYPE + ":(" + selectedTypesHumanReadable + ")";
    }
    filterQueriesFinal.addAll(filterQueries);
    filterQueriesFinalAllTypes.addAll(filterQueriesFinal);
    filterQueriesFinal.add(typeFilterQuery);
    String allTypesFilterQuery = SearchFields.TYPE + ":(dataverses OR datasets OR files)";
    filterQueriesFinalAllTypes.add(allTypesFilterQuery);
    int paginationStart = (page - 1) * paginationGuiRows;
    /**
     * @todo
     *
     * design/make room for sort widget drop down:
     * https://redmine.hmdc.harvard.edu/issues/3482
     */
    // reset the solr error flag
    setSolrErrorEncountered(false);
    try {
        logger.fine("query from user:   " + query);
        logger.fine("queryToPassToSolr: " + queryToPassToSolr);
        logger.fine("sort by: " + sortField);
        /**
         * @todo Number of search results per page should be configurable -
         * https://github.com/IQSS/dataverse/issues/84
         */
        int numRows = 10;
        HttpServletRequest httpServletRequest = (HttpServletRequest) FacesContext.getCurrentInstance().getExternalContext().getRequest();
        DataverseRequest dataverseRequest = new DataverseRequest(session.getUser(), httpServletRequest);
        solrQueryResponse = searchService.search(dataverseRequest, dataverse, queryToPassToSolr, filterQueriesFinal, sortField, sortOrder.toString(), paginationStart, onlyDataRelatedToMe, numRows, false);
        if (solrQueryResponse.hasError()) {
            logger.info(solrQueryResponse.getError());
            setSolrErrorEncountered(true);
        }
        // This 2nd search() is for populating the facets: -- L.A.
        // TODO: ...
        solrQueryResponseAllTypes = searchService.search(dataverseRequest, dataverse, queryToPassToSolr, filterQueriesFinalAllTypes, sortField, sortOrder.toString(), paginationStart, onlyDataRelatedToMe, numRows, false);
        if (solrQueryResponseAllTypes.hasError()) {
            logger.info(solrQueryResponseAllTypes.getError());
            setSolrErrorEncountered(true);
        }
    } catch (SearchException ex) {
        Throwable cause = ex;
        StringBuilder sb = new StringBuilder();
        sb.append(cause + " ");
        while (cause.getCause() != null) {
            cause = cause.getCause();
            sb.append(cause.getClass().getCanonicalName() + " ");
            sb.append(cause + " ");
        }
        String message = "Exception running search for [" + queryToPassToSolr + "] with filterQueries " + filterQueries + " and paginationStart [" + paginationStart + "]: " + sb.toString();
        logger.info(message);
        this.solrIsDown = true;
        this.searchException = ex;
    }
    if (!solrIsDown) {
        this.facetCategoryList = solrQueryResponse.getFacetCategoryList();
        this.searchResultsList = solrQueryResponse.getSolrSearchResults();
        this.searchResultsCount = solrQueryResponse.getNumResultsFound().intValue();
        this.datasetfieldFriendlyNamesBySolrField = solrQueryResponse.getDatasetfieldFriendlyNamesBySolrField();
        this.staticSolrFieldFriendlyNamesBySolrField = solrQueryResponse.getStaticSolrFieldFriendlyNamesBySolrField();
        this.filterQueriesDebug = solrQueryResponse.getFilterQueriesActual();
        this.errorFromSolr = solrQueryResponse.getError();
        paginationGuiStart = paginationStart + 1;
        paginationGuiEnd = Math.min(page * paginationGuiRows, searchResultsCount);
        List<SolrSearchResult> searchResults = solrQueryResponse.getSolrSearchResults();
        /**
         * @todo consider creating Java objects called DatasetCard,
         * DatasetCart, and FileCard since that's what we call them in the
         * UI. These objects' fields (affiliation, citation, etc.) would be
         * populated from Solr if possible (for performance, to avoid extra
         * database calls) or by a database call (if it's tricky or doesn't
         * make sense to get the data in and out of Solr). We would continue
         * to iterate through all the SolrSearchResult objects as we build
         * up the new card objects. Think about how we have a
         * solrSearchResult.setCitation method but only the dataset card in
         * the UI (currently) shows this "citation" field.
         */
        for (SolrSearchResult solrSearchResult : searchResults) {
            if (solrSearchResult.getEntityId() == null) {
                // avoiding EJBException a la https://redmine.hmdc.harvard.edu/issues/3809
                logger.warning(SearchFields.ENTITY_ID + " was null for Solr document id:" + solrSearchResult.getId() + ", skipping. Bad Solr data?");
                break;
            }
            // going to assume that this is NOT a linked object, for now:
            solrSearchResult.setIsInTree(true);
            if (solrSearchResult.getType().equals("dataverses")) {
                // logger.info("XXRESULT: dataverse: "+solrSearchResult.getEntityId());
                dataverseService.populateDvSearchCard(solrSearchResult);
            /*
                    Dataverses cannot be harvested yet.
                    if (isHarvestedDataverse(solrSearchResult.getEntityId())) {
                        solrSearchResult.setHarvested(true);
                    }*/
            } else if (solrSearchResult.getType().equals("datasets")) {
                // logger.info("XXRESULT: dataset: "+solrSearchResult.getEntityId());
                datasetVersionService.populateDatasetSearchCard(solrSearchResult);
                // @todo - the 3 lines below, should they be moved inside
                // searchServiceBean.search()?
                String deaccessionReason = solrSearchResult.getDeaccessionReason();
                if (deaccessionReason != null) {
                    solrSearchResult.setDescriptionNoSnippet(deaccessionReason);
                }
            } else if (solrSearchResult.getType().equals("files")) {
                // logger.info("XXRESULT: datafile: "+solrSearchResult.getEntityId());
                dataFileService.populateFileSearchCard(solrSearchResult);
            /**
             * @todo: show DataTable variables
             */
            }
        }
        // populate preview counts: https://redmine.hmdc.harvard.edu/issues/3560
        previewCountbyType.put("dataverses", 0L);
        previewCountbyType.put("datasets", 0L);
        previewCountbyType.put("files", 0L);
        if (solrQueryResponseAllTypes != null) {
            for (FacetCategory facetCategory : solrQueryResponseAllTypes.getTypeFacetCategories()) {
                for (FacetLabel facetLabel : facetCategory.getFacetLabel()) {
                    previewCountbyType.put(facetLabel.getName(), facetLabel.getCount());
                }
            }
        }
    } else {
        // if SOLR is down:
        List contentsList = dataverseService.findByOwnerId(dataverse.getId());
        contentsList.addAll(datasetService.findByOwnerId(dataverse.getId()));
    // directChildDvObjectContainerList.addAll(contentsList);
    }
/**
 * @todo: pull values from datasetField.getTitle() rather than hard
 * coding them here
 */
// friendlyName.put(SearchFields.SUBTREE, "Dataverse Subtree");
// friendlyName.put(SearchFields.HOST_DATAVERSE, "Original Dataverse");
// friendlyName.put(SearchFields.AUTHOR_STRING, "Author");
// friendlyName.put(SearchFields.AFFILIATION, "Affiliation");
// friendlyName.put(SearchFields.KEYWORD, "Keyword");
// friendlyName.put(SearchFields.DISTRIBUTOR, "Distributor");
// friendlyName.put(SearchFields.FILE_TYPE, "File Type");
// friendlyName.put(SearchFields.PRODUCTION_DATE_YEAR_ONLY, "Production Date");
// friendlyName.put(SearchFields.DISTRIBUTION_DATE_YEAR_ONLY, "Distribution Date");
}
Also used: ArrayList(java.util.ArrayList) HttpServletRequest(javax.servlet.http.HttpServletRequest) DataverseRequest(edu.harvard.iq.dataverse.engine.command.DataverseRequest) List(java.util.List)
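
As an aside, the fq0..fq9 null-filtering loop near the top of search() can be collected more compactly. A behavior-equivalent sketch, not the project's code, assuming Java 8 streams are available:

import java.util.List;
import java.util.Objects;
import java.util.stream.Collectors;
import java.util.stream.Stream;

// Keeps only the non-null filter queries, exactly like the for-loop over fq0..fq9 above.
private static List<String> nonNullFilterQueries(String... fqs) {
    return Stream.of(fqs).filter(Objects::nonNull).collect(Collectors.toList());
}

// usage: filterQueries = nonNullFilterQueries(fq0, fq1, fq2, fq3, fq4, fq5, fq6, fq7, fq8, fq9);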

Example 48 with DataverseRequest

use of edu.harvard.iq.dataverse.engine.command.DataverseRequest in project dataverse by IQSS.

the class MyDataPage method init.

public String init() {
    if ((session.getUser() != null) && (session.getUser().isAuthenticated())) {
        authUser = (AuthenticatedUser) session.getUser();
    } else {
        return permissionsWrapper.notAuthorized();
    // redirect to login OR give some type of 'you must be logged in' message
    }
    // Initialize a filterParams object to build the Publication Status checkboxes
    // 
    HttpServletRequest httpServletRequest = (HttpServletRequest) FacesContext.getCurrentInstance().getExternalContext().getRequest();
    DataverseRequest dataverseRequest = new DataverseRequest(authUser, httpServletRequest);
    this.filterParams = new MyDataFilterParams(dataverseRequest, MyDataFilterParams.defaultDvObjectTypes, null, null, null);
    // Temp DataverseRolePermissionHelper -- not in its normal role but for creating initial checkboxes
    // 
    rolePermissionHelper = new DataverseRolePermissionHelper(getRolesUsedToCreateCheckboxes(dataverseRequest));
    // this.setUserCountTotals(authUser, rolePermissionHelper);
    return null;
}
Also used: HttpServletRequest(javax.servlet.http.HttpServletRequest) DataverseRequest(edu.harvard.iq.dataverse.engine.command.DataverseRequest) DataverseRolePermissionHelper(edu.harvard.iq.dataverse.authorization.DataverseRolePermissionHelper)
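
Examples 46 through 48 all repeat the same FacesContext-to-DataverseRequest boilerplate. Below is a minimal sketch of a helper that factors it out; the class and method names are hypothetical and not part of the Dataverse codebase.

import javax.faces.context.FacesContext;
import javax.servlet.http.HttpServletRequest;

import edu.harvard.iq.dataverse.authorization.users.User;
import edu.harvard.iq.dataverse.engine.command.DataverseRequest;

// Hypothetical utility: builds a DataverseRequest for the given user from the
// HttpServletRequest behind the current JSF request, as done inline in the examples above.
public final class DataverseRequestUtil {

    private DataverseRequestUtil() {
    }

    public static DataverseRequest fromCurrentFacesRequest(User user) {
        HttpServletRequest httpServletRequest = (HttpServletRequest) FacesContext.getCurrentInstance().getExternalContext().getRequest();
        return new DataverseRequest(user, httpServletRequest);
    }
}

With such a helper, the three call sites above would reduce to DataverseRequestUtil.fromCurrentFacesRequest(session.getUser()) or similar.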

Example 49 with DataverseRequest

use of edu.harvard.iq.dataverse.engine.command.DataverseRequest in project dataverse by IQSS.

the class FileRecordWriter method updateDatasetVersion.

// utils
/**
 * Update the dataset version using the command engine so permissions and constraints are enforced.
 * Log errors to both the Glassfish log and the job context, setting the exit status to "FAILED".
 *
 * @param version dataset version
 */
private void updateDatasetVersion(DatasetVersion version) {
    // update version using the command engine to enforce user permissions and constraints
    if (dataset.getVersions().size() == 1 && version.getVersionState() == DatasetVersion.VersionState.DRAFT) {
        try {
            Command<DatasetVersion> cmd;
            cmd = new UpdateDatasetVersionCommand(new DataverseRequest(user, (HttpServletRequest) null), version);
            commandEngine.submit(cmd);
        } catch (CommandException ex) {
            String commandError = "CommandException updating DatasetVersion from batch job: " + ex.getMessage();
            getJobLogger().log(Level.SEVERE, commandError);
            jobContext.setExitStatus("FAILED");
        }
    } else {
        String constraintError = "ConstraintException updating DatasetVersion from batch job: dataset must be a single version in draft mode.";
        getJobLogger().log(Level.SEVERE, constraintError);
        jobContext.setExitStatus("FAILED");
    }
}
Also used: DataverseRequest(edu.harvard.iq.dataverse.engine.command.DataverseRequest) DatasetVersion(edu.harvard.iq.dataverse.DatasetVersion) CommandException(edu.harvard.iq.dataverse.engine.command.exception.CommandException) UpdateDatasetVersionCommand(edu.harvard.iq.dataverse.engine.command.impl.UpdateDatasetVersionCommand)
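
The guard at the top of updateDatasetVersion can be read as a reusable predicate. A small sketch that uses only the calls already shown above; the method name is hypothetical:

// Hypothetical extraction of the guard above: the batch update is only permitted when
// the dataset has exactly one version and that version is still a draft.
private static boolean isSingleDraftVersion(Dataset dataset, DatasetVersion version) {
    return dataset.getVersions().size() == 1
            && version.getVersionState() == DatasetVersion.VersionState.DRAFT;
}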

Example 50 with DataverseRequest

use of edu.harvard.iq.dataverse.engine.command.DataverseRequest in project dataverse by IQSS.

the class MoveDatasetCommandTest method testKeepGuestbook.

/**
 * Moving grandchildAA.
 * Guestbook is not null because the target includes it.
 */
@Test
public void testKeepGuestbook() throws Exception {
    DataverseRequest aRequest = new DataverseRequest(auth, httpRequest);
    testEngine.submit(new MoveDatasetCommand(aRequest, moved, grandchildAA, null));
    assertNotNull(moved.getGuestbook());
}
Also used: DataverseRequest(edu.harvard.iq.dataverse.engine.command.DataverseRequest) Test(org.junit.Test)
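
For reference, a minimal sketch of how the auth and httpRequest fixtures used in this test could be prepared; it reuses the null-HttpServletRequest pattern from Example 49 and is only an illustration, not the actual MoveDatasetCommandTest setup.

import javax.servlet.http.HttpServletRequest;

import org.junit.Before;

import edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser;

// Hypothetical fixture setup; the real test class builds richer fixtures of its own.
private AuthenticatedUser auth;
private HttpServletRequest httpRequest;

@Before
public void setUpRequestFixture() {
    auth = new AuthenticatedUser();
    // As Example 49 shows, DataverseRequest accepts a null HttpServletRequest,
    // so unit tests do not need a servlet mock unless request details matter.
    httpRequest = null;
}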

Aggregations

DataverseRequest (edu.harvard.iq.dataverse.engine.command.DataverseRequest): 57
AuthenticatedUser (edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser): 22
Dataverse (edu.harvard.iq.dataverse.Dataverse): 21
Test (org.junit.Test): 18
Dataset (edu.harvard.iq.dataverse.Dataset): 15
Path (javax.ws.rs.Path): 14
CommandException (edu.harvard.iq.dataverse.engine.command.exception.CommandException): 13
SwordError (org.swordapp.server.SwordError): 10
DatasetVersion (edu.harvard.iq.dataverse.DatasetVersion): 7
HttpServletRequest (javax.servlet.http.HttpServletRequest): 7
DataverseRole (edu.harvard.iq.dataverse.authorization.DataverseRole): 6
IOException (java.io.IOException): 6
POST (javax.ws.rs.POST): 6
DataFile (edu.harvard.iq.dataverse.DataFile): 5
User (edu.harvard.iq.dataverse.authorization.users.User): 5
HarvestingClient (edu.harvard.iq.dataverse.harvest.client.HarvestingClient): 5
JsonObject (javax.json.JsonObject): 5
JsonObjectBuilder (javax.json.JsonObjectBuilder): 5
DepositReceipt (org.swordapp.server.DepositReceipt): 5
RoleAssignment (edu.harvard.iq.dataverse.RoleAssignment): 4