Use of edu.harvard.iq.dataverse.Dataverse in project dataverse by IQSS.
The class UpdateDataverseCommand, method execute.
@Override
public Dataverse execute(CommandContext ctxt) throws CommandException {
    // Save the edited dataverse first; the lists below are rebuilt against the saved entity.
    Dataverse result = ctxt.dataverses().save(editedDv);
    // Replace the search facets, preserving the order of facetList.
    if (facetList != null) {
        ctxt.facets().deleteFacetsFor(result);
        int i = 0;
        for (DatasetFieldType df : facetList) {
            ctxt.facets().create(i++, df.getId(), result.getId());
        }
    }
    // Replace the featured dataverses, preserving the order of featuredDataverseList.
    if (featuredDataverseList != null) {
        ctxt.featuredDataverses().deleteFeaturedDataversesFor(result);
        int i = 0;
        for (Object obj : featuredDataverseList) {
            Dataverse dv = (Dataverse) obj;
            ctxt.featuredDataverses().create(i++, dv.getId(), result.getId());
        }
    }
    // Replace the per-dataverse field type input levels. (Despite its name,
    // deleteFacetsFor() here deletes the existing input levels for this dataverse.)
    if (inputLevelList != null) {
        ctxt.fieldTypeInputLevels().deleteFacetsFor(result);
        for (DataverseFieldTypeInputLevel obj : inputLevelList) {
            ctxt.fieldTypeInputLevels().create(obj);
        }
    }
    // Re-index so the changes are visible in search.
    ctxt.index().indexDataverse(result);
    return result;
}
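A hedged sketch of how a caller might submit this command through the command engine. The constructor argument order below is an assumption inferred from the fields execute() reads (editedDv, facetList, featuredDataverseList, inputLevelList), not confirmed from the source; engineSvc, dataverseService, user, and the list variables are assumed to be available in the caller.

// Hypothetical caller; see the assumptions noted above.
Dataverse edited = dataverseService.find(dataverseId);
edited.setDescription("Updated description");
Dataverse saved = engineSvc.submit(
        new UpdateDataverseCommand(edited, facetList, featuredDataverseList,
                createDataverseRequest(user), inputLevelList));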
Use of edu.harvard.iq.dataverse.Dataverse in project dataverse by IQSS.
The class Datasets, method moveDataset.
@POST
@Path("{id}/move/{targetDataverseAlias}")
public Response moveDataset(@PathParam("id") String id, @PathParam("targetDataverseAlias") String targetDataverseAlias, @QueryParam("forceMove") Boolean force) {
    try {
        System.out.print("force: " + force);
        User u = findUserOrDie();
        Dataset ds = findDatasetOrDie(id);
        Dataverse target = dataverseService.findByAlias(targetDataverseAlias);
        if (target == null) {
            return error(Response.Status.BAD_REQUEST, "Target Dataverse not found.");
        }
        // The command requires a superuser; that check is performed by the command itself.
        execCommand(new MoveDatasetCommand(createDataverseRequest(u), ds, target, force));
        return ok("Dataset moved successfully");
    } catch (WrappedResponse ex) {
        return ex.getResponse();
    }
}
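A minimal JAX-RS client sketch for calling this endpoint; the host, dataset database id, target alias, and API token are placeholder values, and X-Dataverse-key is the standard Dataverse API token header.

import javax.ws.rs.client.Client;
import javax.ws.rs.client.ClientBuilder;
import javax.ws.rs.core.Response;

public class MoveDatasetClient {
    public static void main(String[] args) {
        Client client = ClientBuilder.newClient();
        // Placeholder host, dataset id, and target alias.
        Response r = client.target("https://demo.dataverse.org/api/datasets/1234/move/targetAlias")
                .queryParam("forceMove", true)
                .request()
                .header("X-Dataverse-key", "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx")
                .post(null);
        System.out.println(r.getStatus() + " " + r.readEntity(String.class));
    }
}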
Use of edu.harvard.iq.dataverse.Dataverse in project dataverse by IQSS.
The class ImportServiceBean, method createDataverse.
/**
 * This is just a convenience method for testing migration. It creates
 * a dummy dataverse with the directory name as the dataverse name and alias.
 *
 * @param dvName the directory name, used as both the dataverse name and alias
 * @param dataverseRequest the request on whose behalf the dataverse is created
 * @return the newly created dataverse
 * @throws ImportException if the CreateDataverseCommand fails
 */
@TransactionAttribute(REQUIRES_NEW)
public Dataverse createDataverse(String dvName, DataverseRequest dataverseRequest) throws ImportException {
    Dataverse d = new Dataverse();
    Dataverse root = dataverseService.findByAlias("root");
    d.setOwner(root);
    d.setAlias(dvName);
    d.setName(dvName);
    d.setAffiliation("affiliation");
    d.setPermissionRoot(false);
    d.setDescription("description");
    d.setDataverseType(Dataverse.DataverseType.RESEARCHERS);
    DataverseContact dc = new DataverseContact();
    dc.setContactEmail("pete@mailinator.com");
    ArrayList<DataverseContact> dcList = new ArrayList<>();
    dcList.add(dc);
    d.setDataverseContacts(dcList);
    try {
        d = engineSvc.submit(new CreateDataverseCommand(d, dataverseRequest, null, null));
    } catch (EJBException ex) {
        Throwable cause = ex;
        StringBuilder sb = new StringBuilder();
        sb.append("Error creating dataverse.");
        while (cause.getCause() != null) {
            cause = cause.getCause();
            if (cause instanceof ConstraintViolationException) {
                ConstraintViolationException constraintViolationException = (ConstraintViolationException) cause;
                for (ConstraintViolation<?> violation : constraintViolationException.getConstraintViolations()) {
                    sb.append(" Invalid value: <<<").append(violation.getInvalidValue())
                      .append(">>> for ").append(violation.getPropertyPath())
                      .append(" at ").append(violation.getLeafBean())
                      .append(" - ").append(violation.getMessage());
                }
            }
        }
        logger.log(Level.SEVERE, sb.toString());
        System.out.println("Error creating dataverse: " + sb.toString());
        throw new ImportException(sb.toString());
    } catch (CommandException e) {
        throw new ImportException(e.getMessage());
    }
    return d;
}
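A hedged sketch of how a migration test might use this helper; superuser, httpRequest, and the injected importService are assumed to exist in the calling context and are not names taken from the source.

// Hypothetical caller; all names here are assumptions.
DataverseRequest request = new DataverseRequest(superuser, httpRequest);
Dataverse dummy = importService.createDataverse("migration-test-dv", request);
logger.info("Created dummy dataverse with alias: " + dummy.getAlias());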
Use of edu.harvard.iq.dataverse.Dataverse in project dataverse by IQSS.
The class ImportServiceBean, method doImportHarvestedDataset.
@TransactionAttribute(TransactionAttributeType.REQUIRES_NEW)
public Dataset doImportHarvestedDataset(DataverseRequest dataverseRequest, HarvestingClient harvestingClient, String harvestIdentifier, String metadataFormat, File metadataFile, PrintWriter cleanupLog) throws ImportException, IOException {
    if (harvestingClient == null || harvestingClient.getDataverse() == null) {
        throw new ImportException("importHarvestedDataset called with a null harvestingClient, or an invalid harvestingClient.");
    }
    Dataverse owner = harvestingClient.getDataverse();
    Dataset importedDataset = null;
    DatasetDTO dsDTO = null;
    String json = null;
    if ("ddi".equalsIgnoreCase(metadataFormat) || "oai_ddi".equals(metadataFormat) || metadataFormat.toLowerCase().matches("^oai_ddi.*")) {
        try {
            String xmlToParse = new String(Files.readAllBytes(metadataFile.toPath()));
            // TODO:
            // The import type should be configurable - it should be possible to
            // select whether you want to harvest with or without files,
            // ImportType.HARVEST vs. ImportType.HARVEST_WITH_FILES
            logger.fine("importing DDI " + metadataFile.getAbsolutePath());
            dsDTO = importDDIService.doImport(ImportType.HARVEST_WITH_FILES, xmlToParse);
        } catch (IOException | XMLStreamException | ImportException e) {
            throw new ImportException("Failed to process DDI XML record: " + e.getClass() + " (" + e.getMessage() + ")");
        }
    } else if ("dc".equalsIgnoreCase(metadataFormat) || "oai_dc".equals(metadataFormat)) {
        logger.fine("importing DC " + metadataFile.getAbsolutePath());
        try {
            String xmlToParse = new String(Files.readAllBytes(metadataFile.toPath()));
            dsDTO = importGenericService.processOAIDCxml(xmlToParse);
        } catch (IOException | XMLStreamException e) {
            throw new ImportException("Failed to process Dublin Core XML record: " + e.getClass() + " (" + e.getMessage() + ")");
        }
    } else if ("dataverse_json".equals(metadataFormat)) {
        // This is Dataverse metadata already formatted in JSON.
        // Simply read it into a string, and pass it to the final import further down:
        logger.fine("Attempting to import custom dataverse metadata from file " + metadataFile.getAbsolutePath());
        json = new String(Files.readAllBytes(metadataFile.toPath()));
    } else {
        throw new ImportException("Unsupported import metadata format: " + metadataFormat);
    }
    if (json == null) {
        if (dsDTO != null) {
            // convert the DTO to JSON,
            Gson gson = new GsonBuilder().setPrettyPrinting().create();
            json = gson.toJson(dsDTO);
            logger.fine("JSON produced for the harvested metadata: " + json);
        } else {
            throw new ImportException("Failed to transform XML metadata format " + metadataFormat + " into a DatasetDTO");
        }
    }
    JsonReader jsonReader = Json.createReader(new StringReader(json));
    JsonObject obj = jsonReader.readObject();
    // and call the JSON parser to read it into a dataset
    try {
        JsonParser parser = new JsonParser(datasetfieldService, metadataBlockService, settingsService);
        parser.setLenient(true);
        Dataset ds = parser.parseDataset(obj);
        // For ImportType.NEW, if the metadata contains a global identifier, and it's not a protocol
        // we support, it should be rejected.
        // (TODO: ! - add some way of keeping track of supported protocols!)
        // if (ds.getGlobalId() != null && !ds.getProtocol().equals(settingsService.getValueForKey(SettingsServiceBean.Key.Protocol, ""))) {
        //     throw new ImportException("Could not register id " + ds.getGlobalId() + ", protocol not supported");
        // }
        ds.setOwner(owner);
        ds.getLatestVersion().setDatasetFields(ds.getLatestVersion().initDatasetFields());
        // Check the data against required constraints
        List<ConstraintViolation<DatasetField>> violations = ds.getVersions().get(0).validateRequired();
        if (!violations.isEmpty()) {
            // For migration and harvest, add NA for missing required values
            for (ConstraintViolation<DatasetField> v : violations) {
                DatasetField f = v.getRootBean();
                f.setSingleValue(DatasetField.NA_VALUE);
            }
        }
        // Check the data against validation constraints.
        // If we are migrating and "scrub migration data" is true, we attempt to fix invalid data;
        // if the fix fails, stop processing this file by throwing an exception.
        Set<ConstraintViolation> invalidViolations = ds.getVersions().get(0).validate();
        ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
        Validator validator = factory.getValidator();
        if (!invalidViolations.isEmpty()) {
            for (ConstraintViolation<DatasetFieldValue> v : invalidViolations) {
                DatasetFieldValue f = v.getRootBean();
                boolean fixed = false;
                boolean converted = false;
                // TODO: Is this scrubbing something we want to continue doing?
                if (settingsService.isTrueForKey(SettingsServiceBean.Key.ScrubMigrationData, false)) {
                    fixed = processMigrationValidationError(f, cleanupLog, metadataFile.getName());
                    converted = true;
                    if (fixed) {
                        Set<ConstraintViolation<DatasetFieldValue>> scrubbedViolations = validator.validate(f);
                        if (!scrubbedViolations.isEmpty()) {
                            fixed = false;
                        }
                    }
                }
                if (!fixed) {
                    String msg = "Data modified - File: " + metadataFile.getName() + "; Field: " + f.getDatasetField().getDatasetFieldType().getDisplayName() + "; " + "Invalid value: '" + f.getValue() + "'" + " Converted Value: '" + DatasetField.NA_VALUE + "'";
                    cleanupLog.println(msg);
                    f.setValue(DatasetField.NA_VALUE);
                }
            }
        }
        // A global identifier is required in order to import
        // this dataset:
        if (StringUtils.isEmpty(ds.getGlobalId())) {
            throw new ImportException("The harvested metadata record with the OAI server identifier " + harvestIdentifier + " does not contain a global unique identifier that we could recognize, skipping.");
        }
        ds.setHarvestedFrom(harvestingClient);
        ds.setHarvestIdentifier(harvestIdentifier);
        Dataset existingDs = datasetService.findByGlobalId(ds.getGlobalId());
        if (existingDs != null) {
            // If the dataset already exists in a different dataverse,
            // we are just going to skip it!
            if (existingDs.getOwner() != null && !owner.getId().equals(existingDs.getOwner().getId())) {
                throw new ImportException("The dataset with the global id " + ds.getGlobalId() + " already exists, in the dataverse " + existingDs.getOwner().getAlias() + ", skipping.");
            }
            // If the existing dataset is local rather than harvested, we
            // skip it also:
            if (!existingDs.isHarvested()) {
                throw new ImportException("A LOCAL dataset with the global id " + ds.getGlobalId() + " already exists in this dataverse; skipping.");
            }
            // We will replace the current version with the imported version;
            // a harvested dataset is only expected to have a single version.
            if (existingDs.getVersions().size() != 1) {
                throw new ImportException("Error importing Harvested Dataset, existing dataset has " + existingDs.getVersions().size() + " versions");
            }
            // Purge all the SOLR documents associated with this client from the
            // index server:
            indexService.deleteHarvestedDocuments(existingDs);
            // Harvested files are removed directly via the entity manager, without calling
            // DeleteFileCommand on them.
            for (DataFile harvestedFile : existingDs.getFiles()) {
                DataFile merged = em.merge(harvestedFile);
                em.remove(merged);
                harvestedFile = null;
            }
            // TODO:
            // Verify what happens with the indexed files in SOLR -
            // are they going to be overwritten by the reindexing of the dataset?
            existingDs.setFiles(null);
            Dataset merged = em.merge(existingDs);
            engineSvc.submit(new DestroyDatasetCommand(merged, dataverseRequest));
            importedDataset = engineSvc.submit(new CreateDatasetCommand(ds, dataverseRequest, false, ImportType.HARVEST));
        } else {
            importedDataset = engineSvc.submit(new CreateDatasetCommand(ds, dataverseRequest, false, ImportType.HARVEST));
        }
    } catch (JsonParseException | ImportException | CommandException ex) {
        logger.fine("Failed to import harvested dataset: " + ex.getClass() + ": " + ex.getMessage());
        // Save the JSON we produced next to the metadata file, for postmortem debugging.
        FileOutputStream savedJsonFileStream = new FileOutputStream(new File(metadataFile.getAbsolutePath() + ".json"));
        byte[] jsonBytes = json.getBytes();
        int i = 0;
        while (i < jsonBytes.length) {
            int chunkSize = i + 8192 <= jsonBytes.length ? 8192 : jsonBytes.length - i;
            savedJsonFileStream.write(jsonBytes, i, chunkSize);
            i += chunkSize;
            savedJsonFileStream.flush();
        }
        savedJsonFileStream.close();
        logger.info("JSON produced saved in " + metadataFile.getAbsolutePath() + ".json");
        throw new ImportException("Failed to import harvested dataset: " + ex.getClass() + " (" + ex.getMessage() + ")", ex);
    }
    return importedDataset;
}
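A hedged sketch of a harvesting loop driving this method; downloadedRecords, request, harvestingClient, importService, and oaiIdentifierFor() are illustrative assumptions, not names taken from the source.

// Hypothetical harvest loop; see the assumptions noted above.
PrintWriter cleanupLog = new PrintWriter(new FileWriter("harvest_cleanup.log"));
for (File metadataFile : downloadedRecords) { // assumed list of harvested record files
    try {
        Dataset imported = importService.doImportHarvestedDataset(request, harvestingClient,
                oaiIdentifierFor(metadataFile), "oai_dc", metadataFile, cleanupLog);
        logger.info("imported " + imported.getGlobalId());
    } catch (ImportException | IOException ex) {
        logger.warning("skipping record: " + ex.getMessage());
    }
}
cleanupLog.close();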
Use of edu.harvard.iq.dataverse.Dataverse in project dataverse by IQSS.
The class ContainerManagerImpl, method getEntry.
@Override
public DepositReceipt getEntry(String uri, Map<String, String> map, AuthCredentials authCredentials, SwordConfiguration swordConfiguration) throws SwordServerException, SwordError, SwordAuthException {
    AuthenticatedUser user = swordAuth.auth(authCredentials);
    DataverseRequest dvReq = new DataverseRequest(user, httpRequest);
    logger.fine("getEntry called with url: " + uri);
    urlManager.processUrl(uri);
    String targetType = urlManager.getTargetType();
    if (!targetType.isEmpty()) {
        logger.fine("operating on target type: " + urlManager.getTargetType());
        if ("study".equals(targetType)) {
            String globalId = urlManager.getTargetIdentifier();
            Dataset dataset = datasetService.findByGlobalId(globalId);
            if (dataset != null) {
                if (!permissionService.isUserAllowedOn(user, new GetDraftDatasetVersionCommand(dvReq, dataset), dataset)) {
                    throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "User " + user.getDisplayInfo().getTitle() + " is not authorized to retrieve entry for " + dataset.getGlobalId());
                }
                Dataverse dvThatOwnsDataset = dataset.getOwner();
                ReceiptGenerator receiptGenerator = new ReceiptGenerator();
                String baseUrl = urlManager.getHostnamePlusBaseUrlPath(uri);
                DepositReceipt depositReceipt = receiptGenerator.createDatasetReceipt(baseUrl, dataset);
                if (depositReceipt != null) {
                    return depositReceipt;
                } else {
                    throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not generate deposit receipt.");
                }
            } else {
                throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not find dataset based on URL: " + uri);
            }
        } else {
            throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unsupported target type (" + targetType + ") in URL: " + uri);
        }
    } else {
        throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unable to determine target type from URL: " + uri);
    }
}
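A minimal sketch of retrieving the deposit receipt this method produces, over plain HTTP with basic auth. The edit-URI pattern below follows the Dataverse SWORD v2 data deposit API convention as best understood here, so treat it as an assumption; the host, DOI, and token are placeholders.

import java.net.HttpURLConnection;
import java.net.URL;
import java.util.Base64;

public class SwordGetEntryExample {
    public static void main(String[] args) throws Exception {
        // The API token is used as the username, with an empty password.
        String token = "xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx";
        // Assumed edit-URI pattern; placeholder host and DOI.
        String editUri = "https://demo.dataverse.org/dvn/api/data-deposit/v1.1/swordv2/edit/study/doi:10.5072/FK2/EXAMPLE";
        HttpURLConnection conn = (HttpURLConnection) new URL(editUri).openConnection();
        String basic = Base64.getEncoder().encodeToString((token + ":").getBytes("UTF-8"));
        conn.setRequestProperty("Authorization", "Basic " + basic);
        // A 200 response body is an Atom entry document: the deposit receipt.
        System.out.println("HTTP " + conn.getResponseCode());
    }
}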