Usage of edu.harvard.iq.dataverse.DatasetVersion in the Dataverse project by IQSS:
the SolrIndexServiceBean class, method datasetVersionsToBuildCardsFor.
/**
 * Collects the dataset versions that should have search cards built for them:
 * the dataset's latest version and its released version, skipping either if absent.
 * (The two may be the same version; the Set collapses duplicates.)
 *
 * @param dataset the dataset whose versions are inspected
 * @return the non-null latest and released versions of {@code dataset}
 */
private Set<DatasetVersion> datasetVersionsToBuildCardsFor(Dataset dataset) {
    Set<DatasetVersion> versionsForCards = new HashSet<>();
    DatasetVersion latestVersion = dataset.getLatestVersion();
    DatasetVersion releasedVersion = dataset.getReleasedVersion();
    if (latestVersion != null) {
        versionsForCards.add(latestVersion);
    }
    if (releasedVersion != null) {
        versionsForCards.add(releasedVersion);
    }
    return versionsForCards;
}
Usage of edu.harvard.iq.dataverse.DatasetVersion in the Dataverse project by IQSS:
the SolrIndexServiceBean class, method reindexFilesInBatches.
/**
 * Rebuilds Solr permission documents for the given files and pushes them to Solr.
 * For each file, a document is produced per dataset version that should have a
 * search card, caching permission strings per dataset version to avoid repeated
 * lookups.
 *
 * Fixes over the previous version:
 * - byParentId previously stored an empty list on the first encounter of a parent
 *   and never added that first file's id, silently dropping it; now every file id
 *   is recorded via computeIfAbsent.
 * - desiredCards.get(...) was unboxed directly into a boolean, which throws an
 *   NPE if a version state has no entry; now guarded with Boolean.TRUE.equals.
 *
 * @param filesToReindexPermissionsFor files whose permission docs must be rebuilt
 * @return a human-readable status message (success or the caught exception)
 */
private String reindexFilesInBatches(List<DataFile> filesToReindexPermissionsFor) {
    List<SolrInputDocument> docs = new ArrayList<>();
    Map<Long, List<Long>> byParentId = new HashMap<>();
    // Cache of permission strings keyed by dataset version id, shared across files.
    Map<Long, List<String>> permStringByDatasetVersion = new HashMap<>();
    for (DataFile file : filesToReindexPermissionsFor) {
        Dataset dataset = (Dataset) file.getOwner();
        Map<DatasetVersion.VersionState, Boolean> desiredCards = searchPermissionsService.getDesiredCards(dataset);
        for (DatasetVersion datasetVersionFileIsAttachedTo : datasetVersionsToBuildCardsFor(dataset)) {
            // Guard against a missing map entry; only an explicit TRUE means "build a card".
            boolean cardShouldExist = Boolean.TRUE.equals(desiredCards.get(datasetVersionFileIsAttachedTo.getVersionState()));
            if (cardShouldExist) {
                List<String> cachedPermission = permStringByDatasetVersion.get(datasetVersionFileIsAttachedTo.getId());
                if (cachedPermission == null) {
                    logger.fine("no cached permission! Looking it up...");
                    // constructDatafileSolrDocs populates permissions itself; cache them for later files.
                    List<DvObjectSolrDoc> fileSolrDocs = constructDatafileSolrDocs(file, permStringByDatasetVersion);
                    for (DvObjectSolrDoc fileSolrDoc : fileSolrDocs) {
                        Long datasetVersionId = fileSolrDoc.getDatasetVersionId();
                        if (datasetVersionId != null) {
                            permStringByDatasetVersion.put(datasetVersionId, fileSolrDoc.getPermissions());
                            docs.add(SearchUtil.createSolrDoc(fileSolrDoc));
                        }
                    }
                } else {
                    logger.fine("cached permission is " + cachedPermission);
                    List<DvObjectSolrDoc> fileSolrDocsBasedOnCachedPermissions = constructDatafileSolrDocs(file, permStringByDatasetVersion);
                    for (DvObjectSolrDoc fileSolrDoc : fileSolrDocsBasedOnCachedPermissions) {
                        docs.add(SearchUtil.createSolrDoc(fileSolrDoc));
                    }
                }
            }
        }
        // Record every file id under its parent dataset id. The previous logic
        // dropped the first file seen for each parent (it stored an empty list
        // without adding the id).
        Long parent = file.getOwner().getId();
        byParentId.computeIfAbsent(parent, k -> new ArrayList<>()).add(file.getId());
    }
    try {
        persistToSolr(docs);
        return " " + filesToReindexPermissionsFor.size() + " files indexed across " + docs.size() + " Solr documents ";
    } catch (SolrServerException | IOException ex) {
        return " tried to reindex " + filesToReindexPermissionsFor.size() + " files indexed across " + docs.size() + " Solr documents but caught exception: " + ex;
    }
}
Usage of edu.harvard.iq.dataverse.DatasetVersion in the Dataverse project by IQSS:
the FileRecordJobListener class, method beforeJob.
/**
 * Batch-job lifecycle callback that runs before any other job step.
 * Initializes the per-job logger, copies dataset/user/mode identifiers into the
 * job parameters for downstream steps, and -- when constraints allow the job to
 * run -- optionally clears existing file metadata (REPLACE mode) and loads the
 * checksum manifest. On a constraint failure the job exit status is set to
 * "FAILED".
 *
 * NOTE(review): relies on instance fields injected/initialized elsewhere
 * (jobContext, jobParams, mode, dataset, uploadFolder, dataFileServiceBean) --
 * their setup is not visible in this chunk.
 *
 * @throws Exception propagated from the batch runtime or manifest loading
 */
@Override
public void beforeJob() throws Exception {
    Logger jobLogger;
    // initialize logger
    // (the beforeJob() method gets executed before anything else; so we
    // initialize the logger here. everywhere else will be retrieving
    // it with Logger.getLogger(byname) - that should be giving us the
    // same instance, created here - and not creating a new logger)
    jobLogger = LoggingUtil.getJobLogger(Long.toString(jobContext.getInstanceId()));
    // update job properties to be used elsewhere to determine dataset, user and mode
    JobOperator jobOperator = BatchRuntime.getJobOperator();
    jobParams = jobOperator.getParameters(jobContext.getInstanceId());
    // log job info
    jobLogger.log(Level.INFO, "Job ID = " + jobContext.getExecutionId());
    jobLogger.log(Level.INFO, "Job Name = " + jobContext.getJobName());
    jobLogger.log(Level.INFO, "Job Status = " + jobContext.getBatchStatus());
    // stash identifiers in the job parameters so later steps can read them back
    jobParams.setProperty("datasetGlobalId", getDatasetGlobalId());
    jobParams.setProperty("userId", getUserId());
    jobParams.setProperty("mode", getMode());
    uploadFolder = jobParams.getProperty("uploadFolder");
    // check constraints for running the job
    if (canRunJob()) {
        // if mode = REPLACE, remove all filemetadata from the dataset version and start fresh
        if (mode.equalsIgnoreCase(ImportMode.REPLACE.name())) {
            try {
                DatasetVersion workingVersion = dataset.getEditVersion();
                List<FileMetadata> fileMetadataList = workingVersion.getFileMetadatas();
                jobLogger.log(Level.INFO, "Removing any existing file metadata since mode = REPLACE");
                for (FileMetadata fmd : fileMetadataList) {
                    dataFileServiceBean.deleteFromVersion(workingVersion, fmd.getDataFile());
                }
            } catch (Exception e) {
                // best-effort cleanup: log and continue rather than failing the job here
                jobLogger.log(Level.SEVERE, "Removing existing file metadata in REPLACE mode: " + e.getMessage());
            }
        }
        // load the checksum manifest
        loadChecksumManifest();
    } else {
        // constraints not met -- mark the job as failed so the runtime stops it
        jobContext.setExitStatus("FAILED");
    }
}
Usage of edu.harvard.iq.dataverse.DatasetVersion in the Dataverse project by IQSS:
the AddReplaceFileHelper class, method runMajorCleanup.
/**
 * Rolls back in-progress file changes: strips unsaved files from the working
 * version and, if the working version itself was never persisted (it has no id,
 * so DeleteDatasetVersionCommand cannot be used), removes it from the dataset's
 * version list.
 *
 * @return always {@code true}
 */
private boolean runMajorCleanup() {
    // (1) remove unsaved files from the working version
    removeUnSavedFilesFromWorkingVersion();
    // (2) drop any brand-new (id-less) versions directly from the dataset,
    //     since the delete command requires a persisted id
    msgt("Clear Files");
    dataset.getVersions().removeIf(version -> version.getId() == null);
    return true;
}
Usage of edu.harvard.iq.dataverse.DatasetVersion in the Dataverse project by IQSS:
the AddReplaceFileHelper class, method step_007_auto_isReplacementInLatestVersion.
/**
 * Verifies that the file being replaced still appears in the latest version of
 * its owning dataset -- i.e. that it was not removed in an earlier version.
 * Records an error (and returns false) when the file is absent.
 *
 * @param existingFile the published file the caller wants to replace; never null
 * @return true if the file is present in the latest version, false otherwise
 * @throws NullPointerException if {@code existingFile} is null
 */
private boolean step_007_auto_isReplacementInLatestVersion(DataFile existingFile) {
    if (existingFile == null) {
        throw new NullPointerException("existingFile cannot be null!");
    }
    if (this.hasError()) {
        return false;
    }
    DatasetVersion latest = existingFile.getOwner().getLatestVersion();
    boolean presentInLatest = false;
    for (FileMetadata metadata : latest.getFileMetadatas()) {
        DataFile candidate = metadata.getDataFile();
        if (candidate.getId() != null && Objects.equals(existingFile.getId(), candidate.getId())) {
            presentInLatest = true;
        }
    }
    if (presentInLatest) {
        return true;
    }
    addError(getBundleErr("existing_file_not_in_latest_published_version"));
    return false;
}
Aggregations of DatasetVersion usages across the Dataverse codebase.