Example 31 with DatasetVersion

Use of edu.harvard.iq.dataverse.DatasetVersion in project dataverse by IQSS, in the class SolrIndexServiceBean, method datasetVersionsToBuildCardsFor.

private Set<DatasetVersion> datasetVersionsToBuildCardsFor(Dataset dataset) {
    Set<DatasetVersion> datasetVersions = new HashSet<>();
    // collect the latest version (which may be a draft) ...
    DatasetVersion latest = dataset.getLatestVersion();
    if (latest != null) {
        datasetVersions.add(latest);
    }
    // ... and the released version; the HashSet deduplicates when they are the same version
    DatasetVersion released = dataset.getReleasedVersion();
    if (released != null) {
        datasetVersions.add(released);
    }
    return datasetVersions;
}
Also used : DatasetVersion(edu.harvard.iq.dataverse.DatasetVersion) HashSet(java.util.HashSet)
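
For comparison, the same selection can be written as a stream pipeline. This is only an illustrative sketch, not project code, and assumes the usual java.util and java.util.stream imports:

// Illustrative alternative only, not part of the Dataverse sources.
// Requires: java.util.Objects, java.util.stream.Stream, java.util.stream.Collectors
private Set<DatasetVersion> datasetVersionsToBuildCardsFor(Dataset dataset) {
    return Stream.of(dataset.getLatestVersion(), dataset.getReleasedVersion())
            .filter(Objects::nonNull)
            .collect(Collectors.toSet());
}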

Example 32 with DatasetVersion

Use of edu.harvard.iq.dataverse.DatasetVersion in project dataverse by IQSS, in the class SolrIndexServiceBean, method reindexFilesInBatches.

private String reindexFilesInBatches(List<DataFile> filesToReindexPermissionsFor) {
    List<SolrInputDocument> docs = new ArrayList<>();
    // file ids to reindex, grouped by the id of their parent dataset
    Map<Long, List<Long>> byParentId = new HashMap<>();
    // cached permission strings, keyed by dataset version id, reused across files
    Map<Long, List<String>> permStringByDatasetVersion = new HashMap<>();
    for (DataFile file : filesToReindexPermissionsFor) {
        Dataset dataset = (Dataset) file.getOwner();
        // which version states (DRAFT, RELEASED, ...) should have a search card for this dataset
        Map<DatasetVersion.VersionState, Boolean> desiredCards = searchPermissionsService.getDesiredCards(dataset);
        for (DatasetVersion datasetVersionFileIsAttachedTo : datasetVersionsToBuildCardsFor(dataset)) {
            boolean cardShouldExist = desiredCards.get(datasetVersionFileIsAttachedTo.getVersionState());
            if (cardShouldExist) {
                List<String> cachedPermission = permStringByDatasetVersion.get(datasetVersionFileIsAttachedTo.getId());
                if (cachedPermission == null) {
                    logger.fine("no cached permission! Looking it up...");
                    List<DvObjectSolrDoc> fileSolrDocs = constructDatafileSolrDocs((DataFile) file, permStringByDatasetVersion);
                    for (DvObjectSolrDoc fileSolrDoc : fileSolrDocs) {
                        Long datasetVersionId = fileSolrDoc.getDatasetVersionId();
                        if (datasetVersionId != null) {
                            permStringByDatasetVersion.put(datasetVersionId, fileSolrDoc.getPermissions());
                            SolrInputDocument solrDoc = SearchUtil.createSolrDoc(fileSolrDoc);
                            docs.add(solrDoc);
                        }
                    }
                } else {
                    logger.fine("cached permission is " + cachedPermission);
                    List<DvObjectSolrDoc> fileSolrDocsBasedOnCachedPermissions = constructDatafileSolrDocs((DataFile) file, permStringByDatasetVersion);
                    for (DvObjectSolrDoc fileSolrDoc : fileSolrDocsBasedOnCachedPermissions) {
                        SolrInputDocument solrDoc = SearchUtil.createSolrDoc(fileSolrDoc);
                        docs.add(solrDoc);
                    }
                }
            }
        }
        // group this file's id under its parent dataset id
        Long parent = file.getOwner().getId();
        List<Long> filesForParent = byParentId.get(parent);
        if (filesForParent == null) {
            filesForParent = new ArrayList<>();
            byParentId.put(parent, filesForParent);
        }
        filesForParent.add(file.getId());
    }
    try {
        persistToSolr(docs);
        return " " + filesToReindexPermissionsFor.size() + " files indexed across " + docs.size() + " Solr documents ";
    } catch (SolrServerException | IOException ex) {
        return " tried to reindex " + filesToReindexPermissionsFor.size() + " files indexed across " + docs.size() + " Solr documents but caught exception: " + ex;
    }
}
Also used : HashMap(java.util.HashMap) Dataset(edu.harvard.iq.dataverse.Dataset) SolrServerException(org.apache.solr.client.solrj.SolrServerException) ArrayList(java.util.ArrayList) DatasetVersion(edu.harvard.iq.dataverse.DatasetVersion) IOException(java.io.IOException) DataFile(edu.harvard.iq.dataverse.DataFile) SolrInputDocument(org.apache.solr.common.SolrInputDocument) ArrayList(java.util.ArrayList) List(java.util.List)
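
A note on the per-parent grouping above: since Java 8 the get-then-put sequence can be collapsed with Map.computeIfAbsent. A minimal equivalent sketch using only the standard Map API:

// Equivalent grouping using computeIfAbsent (sketch only)
byParentId.computeIfAbsent(file.getOwner().getId(), k -> new ArrayList<>())
        .add(file.getId());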

Example 33 with DatasetVersion

Use of edu.harvard.iq.dataverse.DatasetVersion in project dataverse by IQSS, in the class FileRecordJobListener, method beforeJob.

@Override
public void beforeJob() throws Exception {
    Logger jobLogger;
    // initialize logger
    // (the beforeJob() method gets executed before anything else; so we
    // initialize the logger here. everywhere else will be retrieving
    // it with Logger.getLogger(byname) - that should be giving us the
    // same instance, created here - and not creating a new logger)
    jobLogger = LoggingUtil.getJobLogger(Long.toString(jobContext.getInstanceId()));
    // update job properties to be used elsewhere to determine dataset, user and mode
    JobOperator jobOperator = BatchRuntime.getJobOperator();
    jobParams = jobOperator.getParameters(jobContext.getInstanceId());
    // log job info
    jobLogger.log(Level.INFO, "Job ID = " + jobContext.getExecutionId());
    jobLogger.log(Level.INFO, "Job Name = " + jobContext.getJobName());
    jobLogger.log(Level.INFO, "Job Status = " + jobContext.getBatchStatus());
    jobParams.setProperty("datasetGlobalId", getDatasetGlobalId());
    jobParams.setProperty("userId", getUserId());
    jobParams.setProperty("mode", getMode());
    uploadFolder = jobParams.getProperty("uploadFolder");
    // check constraints for running the job
    if (canRunJob()) {
        // if mode = REPLACE, remove all filemetadata from the dataset version and start fresh
        if (mode.equalsIgnoreCase(ImportMode.REPLACE.name())) {
            try {
                DatasetVersion workingVersion = dataset.getEditVersion();
                List<FileMetadata> fileMetadataList = workingVersion.getFileMetadatas();
                jobLogger.log(Level.INFO, "Removing any existing file metadata since mode = REPLACE");
                for (FileMetadata fmd : fileMetadataList) {
                    dataFileServiceBean.deleteFromVersion(workingVersion, fmd.getDataFile());
                }
            } catch (Exception e) {
                jobLogger.log(Level.SEVERE, "Removing existing file metadata in REPLACE mode: " + e.getMessage());
            }
        }
        // load the checksum manifest
        loadChecksumManifest();
    } else {
        jobContext.setExitStatus("FAILED");
    }
}
Also used : FileMetadata(edu.harvard.iq.dataverse.FileMetadata) DatasetVersion(edu.harvard.iq.dataverse.DatasetVersion) JobOperator(javax.batch.operations.JobOperator) Logger(java.util.logging.Logger) JobSecurityException(javax.batch.operations.JobSecurityException) JsonProcessingException(com.fasterxml.jackson.core.JsonProcessingException) IOException(java.io.IOException) NoSuchJobExecutionException(javax.batch.operations.NoSuchJobExecutionException)
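
For context, a listener like this runs as part of a JSR-352 batch job; the properties it reads (such as uploadFolder) are supplied when the job is started through the JobOperator. The sketch below is illustrative only: the job XML name and the folder path are assumptions, not taken from the project.

// Hypothetical launch of a batch job whose listener reads "uploadFolder" (sketch only).
// Requires: javax.batch.runtime.BatchRuntime, javax.batch.operations.JobOperator, java.util.Properties
JobOperator operator = BatchRuntime.getJobOperator();
Properties params = new Properties();
// the listener above reads this via jobParams.getProperty("uploadFolder")
params.setProperty("uploadFolder", "/tmp/upload");
// "FileSystemImportJob" is an assumed job XML name, used here only for illustration
long executionId = operator.start("FileSystemImportJob", params);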

Example 34 with DatasetVersion

Use of edu.harvard.iq.dataverse.DatasetVersion in project dataverse by IQSS, in the class AddReplaceFileHelper, method runMajorCleanup.

private boolean runMajorCleanup() {
    // (1) remove unsaved files from the working version
    removeUnSavedFilesFromWorkingVersion();
    // ----------------------------------------------------
    // (2) if the working version is brand new, delete it
    // It doesn't have an "id" so you can't use the DeleteDatasetVersionCommand
    // ----------------------------------------------------
    // Remove this working version from the dataset
    Iterator<DatasetVersion> versionIterator = dataset.getVersions().iterator();
    msgt("Clear Files");
    while (versionIterator.hasNext()) {
        DatasetVersion dsv = versionIterator.next();
        if (dsv.getId() == null) {
            versionIterator.remove();
        }
    }
    return true;
}
Also used : DatasetVersion(edu.harvard.iq.dataverse.DatasetVersion)
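
A small aside on the cleanup above: on Java 8+ the explicit iterator loop can be replaced with Collection.removeIf, with the same effect of dropping versions that have not been persisted (no id), assuming the versions list is mutable, as the iterator-based removal already implies. A sketch of the equivalent call:

// Equivalent to the iterator loop above (sketch only)
dataset.getVersions().removeIf(dsv -> dsv.getId() == null);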

Example 35 with DatasetVersion

Use of edu.harvard.iq.dataverse.DatasetVersion in project dataverse by IQSS, in the class AddReplaceFileHelper, method step_007_auto_isReplacementInLatestVersion.

/**
 * Make sure the file to replace is in the workingVersion
 *  -- e.g. that it wasn't deleted from a previous Version
 *
 * @return true if the existing file is present in the latest version; false if an error was already recorded or the file is missing
 */
private boolean step_007_auto_isReplacementInLatestVersion(DataFile existingFile) {
    if (existingFile == null) {
        throw new NullPointerException("existingFile cannot be null!");
    }
    if (this.hasError()) {
        return false;
    }
    DatasetVersion latestVersion = existingFile.getOwner().getLatestVersion();
    boolean fileInLatestVersion = false;
    for (FileMetadata fm : latestVersion.getFileMetadatas()) {
        if (fm.getDataFile().getId() != null) {
            if (Objects.equals(existingFile.getId(), fm.getDataFile().getId())) {
                fileInLatestVersion = true;
            }
        }
    }
    if (!fileInLatestVersion) {
        addError(getBundleErr("existing_file_not_in_latest_published_version"));
        return false;
    }
    return true;
}
Also used : FileMetadata(edu.harvard.iq.dataverse.FileMetadata) DatasetVersion(edu.harvard.iq.dataverse.DatasetVersion)
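
The membership check in step_007_auto_isReplacementInLatestVersion can also be written as a stream. The sketch below is only an illustrative alternative and behaves the same way, including the null-id guard:

// Sketch of the same check with java.util.stream (requires java.util.Objects)
boolean fileInLatestVersion = latestVersion.getFileMetadatas().stream()
        .map(FileMetadata::getDataFile)
        .anyMatch(df -> df.getId() != null
                && Objects.equals(existingFile.getId(), df.getId()));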

Aggregations

DatasetVersion (edu.harvard.iq.dataverse.DatasetVersion): 85
Dataset (edu.harvard.iq.dataverse.Dataset): 43
Test (org.junit.Test): 36
FileMetadata (edu.harvard.iq.dataverse.FileMetadata): 25
ArrayList (java.util.ArrayList): 24
DataFile (edu.harvard.iq.dataverse.DataFile): 19
JsonObject (javax.json.JsonObject): 15
Date (java.util.Date): 13
StringReader (java.io.StringReader): 11
IOException (java.io.IOException): 10
AuthenticatedUser (edu.harvard.iq.dataverse.authorization.users.AuthenticatedUser): 9
Timestamp (java.sql.Timestamp): 9
EJBException (javax.ejb.EJBException): 9
JsonParser (edu.harvard.iq.dataverse.util.json.JsonParser): 8
JsonObjectBuilder (javax.json.JsonObjectBuilder): 8
JsonReader (javax.json.JsonReader): 8
DatasetField (edu.harvard.iq.dataverse.DatasetField): 7
Dataverse (edu.harvard.iq.dataverse.Dataverse): 7
RoleAssignment (edu.harvard.iq.dataverse.RoleAssignment): 7
DataverseRequest (edu.harvard.iq.dataverse.engine.command.DataverseRequest): 7