Use of edu.harvard.iq.dataverse.Dataset in project dataverse by IQSS.
The class Admin, method getDatasetThumbnailMetadata.
/**
 * This method is used in API tests, called from UtilIT.java.
 */
@GET
@Path("datasets/thumbnailMetadata/{id}")
public Response getDatasetThumbnailMetadata(@PathParam("id") Long idSupplied) {
    Dataset dataset = datasetSvc.find(idSupplied);
    if (dataset == null) {
        return error(Response.Status.NOT_FOUND, "Could not find dataset based on id supplied: " + idSupplied + ".");
    }
    JsonObjectBuilder data = Json.createObjectBuilder();
    DatasetThumbnail datasetThumbnail = dataset.getDatasetThumbnail();
    data.add("isUseGenericThumbnail", dataset.isUseGenericThumbnail());
    data.add("datasetLogoPresent", DatasetUtil.isDatasetLogoPresent(dataset));
    if (datasetThumbnail != null) {
        data.add("datasetThumbnailBase64image", datasetThumbnail.getBase64image());
        DataFile dataFile = datasetThumbnail.getDataFile();
        if (dataFile != null) {
            /**
             * @todo Change this from a String to a long.
             */
            data.add("dataFileId", dataFile.getId().toString());
        }
    }
    return ok(data);
}
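The Javadoc notes that this endpoint exists for API tests. A minimal sketch of such a call, assuming REST-assured (io.restassured) as used in the project's test suite, a locally running instance, and an illustrative dataset id; the base URI and the /api/admin mount point are assumptions, not taken from the snippet above:

import io.restassured.RestAssured;
import io.restassured.response.Response;

public class ThumbnailMetadataCallSketch {

    public static void main(String[] args) {
        // assumption: a local dev instance with the Admin API mounted under /api/admin
        RestAssured.baseURI = "http://localhost:8080";
        long datasetId = 42L; // hypothetical id
        Response response = RestAssured.given()
                .get("/api/admin/datasets/thumbnailMetadata/" + datasetId);
        // 404 when no such dataset exists; 200 with the JSON object built above otherwise
        System.out.println(response.getStatusCode());
        System.out.println(response.body().asString());
    }
}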
Use of edu.harvard.iq.dataverse.Dataset in project dataverse by IQSS.
The class ImportServiceBean, method doImport.
public JsonObjectBuilder doImport(DataverseRequest dataverseRequest, Dataverse owner, String xmlToParse, String fileName, ImportType importType, PrintWriter cleanupLog) throws ImportException, IOException {
    String status = "";
    Long createdId = null;
    DatasetDTO dsDTO = null;
    try {
        dsDTO = importDDIService.doImport(importType, xmlToParse);
    } catch (XMLStreamException e) {
        throw new ImportException("XMLStreamException: " + e);
    }
    // convert the DTO to JSON,
    Gson gson = new GsonBuilder().setPrettyPrinting().create();
    String json = gson.toJson(dsDTO);
    JsonReader jsonReader = Json.createReader(new StringReader(json));
    JsonObject obj = jsonReader.readObject();
    // and parse the JSON to read it into a Dataset
    try {
        JsonParser parser = new JsonParser(datasetfieldService, metadataBlockService, settingsService);
        parser.setLenient(!importType.equals(ImportType.NEW));
        Dataset ds = parser.parseDataset(obj);
        // for ImportType.NEW, if the dataset's global id uses a protocol we don't
        // support, it will be rejected.
        if (importType.equals(ImportType.NEW)) {
            if (ds.getGlobalId() != null && !ds.getProtocol().equals(settingsService.getValueForKey(SettingsServiceBean.Key.Protocol, ""))) {
                throw new ImportException("Could not register id " + ds.getGlobalId() + ", protocol not supported");
            }
        }
        ds.setOwner(owner);
        ds.getLatestVersion().setDatasetFields(ds.getLatestVersion().initDatasetFields());
        // Check data against required constraints
        List<ConstraintViolation<DatasetField>> violations = ds.getVersions().get(0).validateRequired();
        if (!violations.isEmpty()) {
            if (importType.equals(ImportType.MIGRATION) || importType.equals(ImportType.HARVEST)) {
                // For migration and harvest, add NA for missing required values
                for (ConstraintViolation<DatasetField> v : violations) {
                    DatasetField f = v.getRootBean();
                    f.setSingleValue(DatasetField.NA_VALUE);
                }
            } else {
                // when importing a new dataset, the import will fail
                // if required values are missing.
                String errMsg = "Error importing data:";
                for (ConstraintViolation<DatasetField> v : violations) {
                    errMsg += " " + v.getMessage();
                }
                throw new ImportException(errMsg);
            }
        }
        // Check data against validation constraints.
        // If we are migrating and "scrub migration data" is true, we attempt to fix invalid data;
        // if the fix fails, stop processing this file by throwing an exception.
        Set<ConstraintViolation> invalidViolations = ds.getVersions().get(0).validate();
        ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
        Validator validator = factory.getValidator();
        if (!invalidViolations.isEmpty()) {
            for (ConstraintViolation<DatasetFieldValue> v : invalidViolations) {
                DatasetFieldValue f = v.getRootBean();
                boolean fixed = false;
                boolean converted = false;
                if ((importType.equals(ImportType.MIGRATION) || importType.equals(ImportType.HARVEST)) && settingsService.isTrueForKey(SettingsServiceBean.Key.ScrubMigrationData, false)) {
                    fixed = processMigrationValidationError(f, cleanupLog, fileName);
                    converted = true;
                    if (fixed) {
                        Set<ConstraintViolation<DatasetFieldValue>> scrubbedViolations = validator.validate(f);
                        if (!scrubbedViolations.isEmpty()) {
                            fixed = false;
                        }
                    }
                }
                if (!fixed) {
                    if (importType.equals(ImportType.HARVEST)) {
                        String msg = "Data modified - File: " + fileName + "; Field: " + f.getDatasetField().getDatasetFieldType().getDisplayName() + "; " + "Invalid value: '" + f.getValue() + "'" + " Converted Value: '" + DatasetField.NA_VALUE + "'";
                        cleanupLog.println(msg);
                        f.setValue(DatasetField.NA_VALUE);
                    } else {
                        String msg = " Validation error for ";
                        if (converted) {
                            msg += "converted ";
                        }
                        msg += "value: " + f.getValue() + ", " + f.getValidationMessage();
                        throw new ImportException(msg);
                    }
                }
            }
        }
        Dataset existingDs = datasetService.findByGlobalId(ds.getGlobalId());
        if (existingDs != null) {
            if (importType.equals(ImportType.HARVEST)) {
                // We will replace the current version with the imported version.
                if (existingDs.getVersions().size() != 1) {
                    throw new ImportException("Error importing Harvested Dataset, existing dataset has " + existingDs.getVersions().size() + " versions");
                }
                engineSvc.submit(new DestroyDatasetCommand(existingDs, dataverseRequest));
                Dataset managedDs = engineSvc.submit(new CreateDatasetCommand(ds, dataverseRequest, false, importType));
                status = " updated dataset, id=" + managedDs.getId() + ".";
            } else {
                // check that the version number isn't already in the dataset
                for (DatasetVersion dsv : existingDs.getVersions()) {
                    if (dsv.getVersionNumber().equals(ds.getLatestVersion().getVersionNumber())) {
                        throw new ImportException("VersionNumber " + ds.getLatestVersion().getVersionNumber() + " already exists in dataset " + existingDs.getGlobalId());
                    }
                }
                DatasetVersion dsv = engineSvc.submit(new CreateDatasetVersionCommand(dataverseRequest, existingDs, ds.getVersions().get(0)));
                status = " created datasetVersion, for dataset " + dsv.getDataset().getGlobalId();
                createdId = dsv.getId();
            }
        } else {
            Dataset managedDs = engineSvc.submit(new CreateDatasetCommand(ds, dataverseRequest, false, importType));
            status = " created dataset, id=" + managedDs.getId() + ".";
            createdId = managedDs.getId();
        }
    } catch (JsonParseException ex) {
        logger.log(Level.INFO, "Error parsing datasetVersion: {0}", ex.getMessage());
        throw new ImportException("Error parsing datasetVersion: " + ex.getMessage(), ex);
    } catch (CommandException ex) {
        logger.log(Level.INFO, "Error executing Create dataset command: {0}", ex.getMessage());
        throw new ImportException("Error executing dataverse command: " + ex.getMessage(), ex);
    }
    return Json.createObjectBuilder().add("message", status);
}
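The DTO-to-JsonObject hop near the top of doImport (serialize the DTO with Gson, re-read it with javax.json so downstream code works with a JsonObject) can be isolated as a standalone sketch; PersonDTO here is a made-up stand-in for DatasetDTO:

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import javax.json.Json;
import javax.json.JsonObject;
import javax.json.JsonReader;
import java.io.StringReader;

public class DtoRoundTripSketch {

    // Hypothetical stand-in for DatasetDTO, just to keep the sketch self-contained.
    static class PersonDTO {
        String name = "Ada";
        int age = 36;
    }

    public static void main(String[] args) {
        // Serialize the DTO with Gson, as doImport does with DatasetDTO...
        Gson gson = new GsonBuilder().setPrettyPrinting().create();
        String json = gson.toJson(new PersonDTO());
        // ...then re-read it through javax.json to obtain a JsonObject.
        try (JsonReader reader = Json.createReader(new StringReader(json))) {
            JsonObject obj = reader.readObject();
            System.out.println(obj.getString("name")); // prints "Ada"
            System.out.println(obj.getInt("age"));     // prints 36
        }
    }
}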
Use of edu.harvard.iq.dataverse.Dataset in project dataverse by IQSS.
The class ContainerManagerImpl, method replaceMetadata.
@Override
public DepositReceipt replaceMetadata(String uri, Deposit deposit, AuthCredentials authCredentials, SwordConfiguration swordConfiguration) throws SwordError, SwordServerException, SwordAuthException {
    AuthenticatedUser user = swordAuth.auth(authCredentials);
    DataverseRequest dvReq = new DataverseRequest(user, httpRequest);
    logger.fine("replaceMetadata called with url: " + uri);
    urlManager.processUrl(uri);
    String targetType = urlManager.getTargetType();
    if (!targetType.isEmpty()) {
        logger.fine("operating on target type: " + urlManager.getTargetType());
        if ("dataverse".equals(targetType)) {
            throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Metadata replace of dataverse is not supported.");
        } else if ("study".equals(targetType)) {
            logger.fine("replacing metadata for dataset");
            // do a sanity check on the XML received
            try {
                SwordEntry swordEntry = deposit.getSwordEntry();
                logger.fine("deposit XML received by replaceMetadata():\n" + swordEntry);
            } catch (ParseException ex) {
                throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Cannot replace dataset metadata due to malformed Atom entry: " + ex);
            }
            String globalId = urlManager.getTargetIdentifier();
            Dataset dataset = datasetService.findByGlobalId(globalId);
            if (dataset != null) {
                Dataverse dvThatOwnsDataset = dataset.getOwner();
                UpdateDatasetCommand updateDatasetCommand = new UpdateDatasetCommand(dataset, dvReq);
                if (!permissionService.isUserAllowedOn(user, updateDatasetCommand, dataset)) {
                    throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "User " + user.getDisplayInfo().getTitle() + " is not authorized to modify dataverse " + dvThatOwnsDataset.getAlias());
                }
                DatasetVersion datasetVersion = dataset.getEditVersion();
                // erase all metadata before populating the dataset version
                List<DatasetField> emptyDatasetFields = new ArrayList<>();
                datasetVersion.setDatasetFields(emptyDatasetFields);
                String foreignFormat = SwordUtil.DCTERMS;
                try {
                    importGenericService.importXML(deposit.getSwordEntry().toString(), foreignFormat, datasetVersion);
                } catch (Exception ex) {
                    throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "problem calling importXML: " + ex);
                }
                swordService.addDatasetContact(datasetVersion, user);
                swordService.addDatasetDepositor(datasetVersion, user);
                swordService.addDatasetSubjectIfMissing(datasetVersion);
                swordService.setDatasetLicenseAndTermsOfUse(datasetVersion, deposit.getSwordEntry());
                try {
                    engineSvc.submit(updateDatasetCommand);
                } catch (CommandException ex) {
                    throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "problem updating dataset: " + ex);
                }
                ReceiptGenerator receiptGenerator = new ReceiptGenerator();
                String baseUrl = urlManager.getHostnamePlusBaseUrlPath(uri);
                DepositReceipt depositReceipt = receiptGenerator.createDatasetReceipt(baseUrl, dataset);
                return depositReceipt;
            } else {
                throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not find dataset based on global id (" + globalId + ") in URL: " + uri);
            }
        } else {
            throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unknown target type specified on which to replace metadata: " + uri);
        }
    } else {
        throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "No target specified on which to replace metadata: " + uri);
    }
}
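On the wire, this method is reached by a SWORD client PUTting an Atom entry carrying DCTERMS fields to the dataset's edit URI. A hedged client-side sketch using java.net.http (Java 11+); the edit-URI layout, the DOI, the field values, and the token handling are illustrative assumptions, not taken from the code above:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.util.Base64;

public class SwordReplaceMetadataSketch {

    public static void main(String[] args) throws Exception {
        // The whole entry replaces the dataset's metadata, per the erase-then-import logic above.
        String atomEntry =
                "<entry xmlns=\"http://www.w3.org/2005/Atom\""
              + "       xmlns:dcterms=\"http://purl.org/dc/terms/\">"
              + "  <dcterms:title>Replacement Title</dcterms:title>"
              + "  <dcterms:description>New description.</dcterms:description>"
              + "</entry>";

        // assumption: the documented Dataverse SWORD edit-URI layout, with a made-up DOI
        URI editUri = URI.create(
                "https://demo.dataverse.org/dvn/api/data-deposit/v1.1/swordv2/edit/study/doi:10.5072/FK2/EXAMPLE");

        HttpRequest request = HttpRequest.newBuilder(editUri)
                .header("Content-Type", "application/atom+xml")
                // assumption: Dataverse's SWORD API takes the API token as the Basic-auth username
                .header("Authorization", "Basic " + Base64.getEncoder()
                        .encodeToString("API_TOKEN_HERE:".getBytes()))
                .PUT(HttpRequest.BodyPublishers.ofString(atomEntry))
                .build();

        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode()); // a deposit receipt is returned on success
    }
}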
Use of edu.harvard.iq.dataverse.Dataset in project dataverse by IQSS.
The class JsonPrinterTest, method testJson_PrivateUrl.
@Test
public void testJson_PrivateUrl() {
    DataverseRole aRole = new DataverseRole();
    PrivateUrlUser privateUrlUserIn = new PrivateUrlUser(42);
    RoleAssignee anAssignee = privateUrlUserIn;
    Dataset dataset = new Dataset();
    String privateUrlToken = "e1d53cf6-794a-457a-9709-7c07629a8267";
    RoleAssignment ra = new RoleAssignment(aRole, anAssignee, dataset, privateUrlToken);
    String dataverseSiteUrl = "https://dataverse.example.edu";
    PrivateUrl privateUrl = new PrivateUrl(ra, dataset, dataverseSiteUrl);
    JsonObjectBuilder job = JsonPrinter.json(privateUrl);
    assertNotNull(job);
    JsonObject jsonObject = job.build();
    assertEquals("e1d53cf6-794a-457a-9709-7c07629a8267", jsonObject.getString("token"));
    assertEquals("https://dataverse.example.edu/privateurl.xhtml?token=e1d53cf6-794a-457a-9709-7c07629a8267", jsonObject.getString("link"));
    assertEquals("e1d53cf6-794a-457a-9709-7c07629a8267", jsonObject.getJsonObject("roleAssignment").getString("privateUrlToken"));
    assertEquals("#42", jsonObject.getJsonObject("roleAssignment").getString("assignee"));
}
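Read together, the four assertEquals calls pin down the shape of the JSON that JsonPrinter.json(privateUrl) produces. Reconstructed from the assertions alone (any fields the test does not assert on are omitted here):

{
  "token": "e1d53cf6-794a-457a-9709-7c07629a8267",
  "link": "https://dataverse.example.edu/privateurl.xhtml?token=e1d53cf6-794a-457a-9709-7c07629a8267",
  "roleAssignment": {
    "privateUrlToken": "e1d53cf6-794a-457a-9709-7c07629a8267",
    "assignee": "#42"
  }
}

Note the "#42" assignee string: it is the identifier of the PrivateUrlUser constructed with id 42 at the top of the test.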
Use of edu.harvard.iq.dataverse.Dataset in project dataverse by IQSS.
The class JsonParserTest, method testparseFiles.
@Test
public void testparseFiles() throws JsonParseException {
    JsonArrayBuilder metadatasJsonBuilder = Json.createArrayBuilder();
    JsonObjectBuilder fileMetadataGood = Json.createObjectBuilder();
    fileMetadataGood.add("label", "myLabel");
    JsonObjectBuilder fileGood = Json.createObjectBuilder();
    fileMetadataGood.add("dataFile", fileGood);
    fileMetadataGood.add("categories", Json.createArrayBuilder().add("Documentation"));
    JsonObjectBuilder fileMetadataBad = Json.createObjectBuilder();
    fileMetadataBad.add("label", "bad");
    JsonObjectBuilder fileBad = Json.createObjectBuilder();
    fileMetadataBad.add("dataFile", fileBad);
    fileMetadataBad.add("categories", Json.createArrayBuilder().add(BigDecimal.ONE));
    metadatasJsonBuilder.add(fileMetadataGood);
    metadatasJsonBuilder.add(fileMetadataBad);
    JsonArray metadatasJson = metadatasJsonBuilder.build();
    DatasetVersion dsv = new DatasetVersion();
    Dataset dataset = new Dataset();
    dsv.setDataset(dataset);
    List<FileMetadata> fileMetadatas = new JsonParser().parseFiles(metadatasJson, dsv);
    System.out.println("fileMetadatas: " + fileMetadatas);
    assertEquals("myLabel", fileMetadatas.get(0).getLabel());
    assertEquals("Documentation", fileMetadatas.get(0).getCategories().get(0).getName());
    assertEquals(null, fileMetadatas.get(1).getCategories());
    List<FileMetadata> codeCoverage = new JsonParser().parseFiles(
            Json.createArrayBuilder()
                    .add(Json.createObjectBuilder()
                            .add("label", "myLabel")
                            .add("dataFile", Json.createObjectBuilder()
                                    .add("categories", JsonValue.NULL)))
                    .build(), dsv);
    assertEquals(null, codeCoverage.get(0).getCategories());
}
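For reference, the metadatasJson array assembled by the builders above serializes to the following, reconstructed directly from the builder calls; the second entry's numeric category (from BigDecimal.ONE) is deliberately invalid, which is why the test expects its parsed categories to be null:

[
  { "label": "myLabel", "dataFile": {}, "categories": ["Documentation"] },
  { "label": "bad", "dataFile": {}, "categories": [1] }
]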