Use of edu.harvard.iq.dataverse.DatasetVersion in project dataverse by IQSS.
The class Index, method filesearch:
@GET
@Path("filesearch")
public Response filesearch(@QueryParam("persistentId") String persistentId, @QueryParam("semanticVersion") String semanticVersion, @QueryParam("q") String userSuppliedQuery) {
    Dataset dataset = datasetService.findByGlobalId(persistentId);
    if (dataset == null) {
        return error(Status.BAD_REQUEST, "Could not find dataset with persistent id " + persistentId);
    }
    User user = GuestUser.get();
    try {
        AuthenticatedUser authenticatedUser = findAuthenticatedUserOrDie();
        if (authenticatedUser != null) {
            user = authenticatedUser;
        }
    } catch (WrappedResponse ex) {
        // No valid API token supplied; fall back to the guest user.
    }
    RetrieveDatasetVersionResponse datasetVersionResponse = datasetVersionService.retrieveDatasetVersionByPersistentId(persistentId, semanticVersion);
    if (datasetVersionResponse == null) {
        return error(Status.BAD_REQUEST, "Problem searching for files. Could not find dataset version based on " + persistentId + " and " + semanticVersion);
    }
    DatasetVersion datasetVersion = datasetVersionResponse.getDatasetVersion();
    FileView fileView = searchFilesService.getFileView(datasetVersion, user, userSuppliedQuery);
    if (fileView == null) {
        return error(Status.BAD_REQUEST, "Problem searching for files. Null returned from getFileView.");
    }
    JsonArrayBuilder filesFound = Json.createArrayBuilder();
    JsonArrayBuilder cards = Json.createArrayBuilder();
    JsonArrayBuilder fileIds = Json.createArrayBuilder();
    for (SolrSearchResult result : fileView.getSolrSearchResults()) {
        cards.add(result.getNameSort());
        fileIds.add(result.getEntityId());
        JsonObjectBuilder fileFound = Json.createObjectBuilder();
        fileFound.add("name", result.getNameSort());
        fileFound.add("entityId", result.getEntityId().toString());
        fileFound.add("datasetVersionId", result.getDatasetVersionId());
        fileFound.add("datasetId", result.getParent().get(SearchFields.ID));
        filesFound.add(fileFound);
    }
    JsonArrayBuilder facets = Json.createArrayBuilder();
    for (FacetCategory facetCategory : fileView.getFacetCategoryList()) {
        facets.add(facetCategory.getFriendlyName());
    }
    JsonArrayBuilder filterQueries = Json.createArrayBuilder();
    for (String filterQuery : fileView.getFilterQueries()) {
        filterQueries.add(filterQuery);
    }
    JsonArrayBuilder allDatasetVersionIds = Json.createArrayBuilder();
    for (DatasetVersion dsVersion : dataset.getVersions()) {
        allDatasetVersionIds.add(dsVersion.getId());
    }
    JsonObjectBuilder data = Json.createObjectBuilder();
    data.add("filesFound", filesFound);
    data.add("cards", cards);
    data.add("fileIds", fileIds);
    data.add("facets", facets);
    data.add("user", user.getIdentifier());
    data.add("persistentID", persistentId);
    data.add("query", fileView.getQuery());
    data.add("filterQueries", filterQueries);
    data.add("allDatasetVersionIds", allDatasetVersionIds);
    data.add("semanticVersion", datasetVersion.getSemanticVersion());
    return ok(data);
}
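A minimal sketch of how a client might call this endpoint, assuming the enclosing Index resource is mounted under /api/admin/index (the class-level @Path annotation is not shown in this excerpt) and using a made-up persistent identifier; java.net.http requires Java 11 or later:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class FilesearchClientSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical base path and persistent id; both are assumptions,
        // not taken from the excerpt above.
        String url = "http://localhost:8080/api/admin/index/filesearch"
                + "?persistentId=doi:10.5072/FK2/EXAMPLE"
                + "&semanticVersion=1.0"
                + "&q=data";
        HttpRequest request = HttpRequest.newBuilder(URI.create(url)).GET().build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        // On success the body is a JSON object with filesFound, cards,
        // fileIds, facets, filterQueries, allDatasetVersionIds, etc.
        System.out.println(response.body());
    }
}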
Use of edu.harvard.iq.dataverse.DatasetVersion in project dataverse by IQSS.
The class WorldMapRelatedData, method getWorldMapDatafileInfo:
/**
 * Retrieve FileMetadata for use by WorldMap.
 * This includes information about the DataFile, Dataset, DatasetVersion, and Dataverse.
 *
 * @param jsonTokenData JSON request body carrying the WorldMap token
 * @param request the incoming HTTP request
 * @return JSON describing the file, its dataset, dataset version, and dataverse
 */
@POST
// + "{worldmap_token}")
@Path(GET_WORLDMAP_DATAFILE_API_PATH_FRAGMENT)
public Response getWorldMapDatafileInfo(String jsonTokenData, @Context HttpServletRequest request) {
    // , @PathParam("worldmap_token") String worldmapTokenParam) {
    if (true) {
        // return okResponse("Currently deactivated");
        // return okResponse("remote server: " + request.getRemoteAddr());
    }
    logger.info("API call: getWorldMapDatafileInfo");
    // ----------------------------------
    // Auth check: parse the JSON message and check for a valid GEOCONNECT_TOKEN_KEY and GEOCONNECT_TOKEN_VALUE
    // -- For testing, the GEOCONNECT_TOKEN_VALUE will be dynamic, found in the db
    // ----------------------------------
    logger.info("(1) jsonTokenData: " + jsonTokenData);
    // Parse JSON
    JsonObject jsonTokenInfo;
    try (StringReader rdr = new StringReader(jsonTokenData)) {
        jsonTokenInfo = Json.createReader(rdr).readObject();
    } catch (JsonParsingException jpe) {
        logger.log(Level.SEVERE, "Json: " + jsonTokenData);
        return error(Response.Status.BAD_REQUEST, "Error parsing Json: " + jpe.getMessage());
    }
    logger.info("(1a) jsonTokenInfo: " + jsonTokenInfo);
    // Retrieve token string
    String worldmapTokenParam = this.retrieveTokenValueFromJson(jsonTokenInfo);
    logger.info("(1b) token from JSON: " + worldmapTokenParam);
    if (worldmapTokenParam == null) {
        return error(Response.Status.BAD_REQUEST, "Token not found in JSON request.");
    }
    // Retrieve WorldMapToken and make sure it is valid
    WorldMapToken wmToken = tokenServiceBean.retrieveAndRefreshValidToken(worldmapTokenParam);
    logger.info("(2) token retrieved from db: " + wmToken);
    if (wmToken == null) {
        return error(Response.Status.UNAUTHORIZED, "No access. Invalid token.");
    }
    // Make sure the token's user still has permission to access the file
    logger.info("(3) check permissions");
    if (!(tokenServiceBean.canTokenUserEditFile(wmToken))) {
        tokenServiceBean.expireToken(wmToken);
        return error(Response.Status.UNAUTHORIZED, "No access. Invalid token.");
    }
    // (1) Retrieve token-connected data: DataverseUser, DataFile.
    // Make sure the token user and file are still available.
    AuthenticatedUser dvUser = wmToken.getDataverseUser();
    if (dvUser == null) {
        return error(Response.Status.NOT_FOUND, "DataverseUser not found for token");
    }
    DataFile dfile = wmToken.getDatafile();
    if (dfile == null) {
        return error(Response.Status.NOT_FOUND, "DataFile not found for token");
    }
    // (1a) Retrieve FileMetadata
    FileMetadata dfile_meta = dfile.getFileMetadata();
    if (dfile_meta == null) {
        return error(Response.Status.NOT_FOUND, "FileMetadata not found");
    }
    // (2) Now get the dataset and the latest DatasetVersion
    Dataset dset = dfile.getOwner();
    if (dset == null) {
        return error(Response.Status.NOT_FOUND, "Owning Dataset for this DataFile not found");
    }
    // (2a) Latest DatasetVersion
    // !! How do you check whether the latest version contains this specific file?
    DatasetVersion dset_version = dset.getLatestVersion();
    if (dset_version == null) {
        return error(Response.Status.NOT_FOUND, "Latest DatasetVersion for this DataFile not found");
    }
    // (3) Get the Dataverse
    Dataverse dverse = dset.getOwner();
    if (dverse == null) {
        return error(Response.Status.NOT_FOUND, "Dataverse for this DataFile's Dataset not found");
    }
    // (4) Roll it all up in a JSON response
    final JsonObjectBuilder jsonData = Json.createObjectBuilder();
    // ------------------------------------
    if (dfile.isShapefileType()) {
        jsonData.add("mapping_type", "shapefile");
    } else if (dfile.isTabularData()) {
        jsonData.add("mapping_type", "tabular");
    } else {
        logger.log(Level.SEVERE, "This was neither a Shapefile nor a Tabular data file. DataFile id: " + dfile.getId());
        return error(Response.Status.BAD_REQUEST, "Sorry! This file does not have mapping data. Please contact the Dataverse administrator. DataFile id: " + dfile.getId());
    }
    // ------------------------------------
    // DataverseUser info
    // ------------------------------------
    jsonData.add("dv_user_id", dvUser.getId());
    jsonData.add("dv_username", dvUser.getUserIdentifier());
    jsonData.add("dv_user_email", dvUser.getEmail());
    // ------------------------------------
    // Dataverse URLs to this server
    // ------------------------------------
    String serverName = systemConfig.getDataverseSiteUrl();
    jsonData.add("return_to_dataverse_url", dset_version.getReturnToFilePageURL(serverName, dset, dfile));
    jsonData.add("datafile_download_url", dfile.getMapItFileDownloadURL(serverName));
    // ------------------------------------
    // Dataverse
    // ------------------------------------
    // jsonData.add("dataverse_installation_name", "Harvard Dataverse"); // TODO: fix
    // Is the site URL enough to distinguish a Dataverse installation?
    jsonData.add("dataverse_installation_name", systemConfig.getDataverseSiteUrl());
    jsonData.add("dataverse_id", dverse.getId());
    jsonData.add("dataverse_name", dverse.getName());
    String dataverseDesc = dverse.getDescription();
    if (dataverseDesc == null || dataverseDesc.equalsIgnoreCase("")) {
        dataverseDesc = "";
    }
    jsonData.add("dataverse_description", dataverseDesc);
    // ------------------------------------
    // Dataset info
    // ------------------------------------
    jsonData.add("dataset_id", dset.getId());
    // ------------------------------------
    // DatasetVersion info
    // ------------------------------------
    // Database id
    jsonData.add("dataset_version_id", dset_version.getId());
    // Major/minor version number, e.g. 3.1
    jsonData.add("dataset_semantic_version", dset_version.getSemanticVersion());
    jsonData.add("dataset_name", dset_version.getTitle());
    jsonData.add("dataset_citation", dset_version.getCitation(true));
    // TODO: description not yet populated
    jsonData.add("dataset_description", "");
    jsonData.add("dataset_is_public", dset_version.isReleased());
    // ------------------------------------
    // DataFile/FileMetadata info
    // ------------------------------------
    jsonData.add("datafile_id", dfile.getId());
    jsonData.add("datafile_label", dfile_meta.getLabel());
    // jsonData.add("filename", dfile_meta.getLabel());
    jsonData.add("datafile_expected_md5_checksum", dfile.getChecksumValue());
    Long fsize = dfile.getFilesize();
    if (fsize == null) {
        // Use -1 as a sentinel when the file size is unknown.
        fsize = new Long(-1);
    }
    jsonData.add("datafile_filesize", fsize);
    jsonData.add("datafile_content_type", dfile.getContentType());
    jsonData.add("datafile_create_datetime", dfile.getCreateDate().toString());
    // Restriction status of the DataFile
    jsonData.add("datafile_is_restricted", dfile.isRestricted());
    return ok(jsonData);
}
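The method expects the WorldMap token in the JSON request body. Below is a minimal sketch of building such a payload with the same javax.json API used above; the key name GEOCONNECT_TOKEN is an assumption based on the comments in the method (the real key is whatever retrieveTokenValueFromJson looks for, which is not shown in this excerpt):

import javax.json.Json;

public class WorldMapTokenPayloadSketch {
    public static void main(String[] args) {
        // Hypothetical key name and dummy token value.
        String payload = Json.createObjectBuilder()
                .add("GEOCONNECT_TOKEN", "1234567890abcdef")
                .build()
                .toString();
        System.out.println(payload); // {"GEOCONNECT_TOKEN":"1234567890abcdef"}
    }
}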
Use of edu.harvard.iq.dataverse.DatasetVersion in project dataverse by IQSS.
The class MediaResourceManagerImpl, method replaceOrAddFiles:
DepositReceipt replaceOrAddFiles(String uri, Deposit deposit, AuthCredentials authCredentials, SwordConfiguration swordConfiguration, boolean shouldReplace) throws SwordError, SwordAuthException, SwordServerException {
    AuthenticatedUser user = swordAuth.auth(authCredentials);
    DataverseRequest dvReq = new DataverseRequest(user, httpRequest);
    urlManager.processUrl(uri);
    String globalId = urlManager.getTargetIdentifier();
    if (urlManager.getTargetType().equals("study") && globalId != null) {
        logger.fine("looking up dataset with globalId " + globalId);
        Dataset dataset = datasetService.findByGlobalId(globalId);
        if (dataset == null) {
            throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not find dataset with global ID of " + globalId);
        }
        UpdateDatasetCommand updateDatasetCommand = new UpdateDatasetCommand(dataset, dvReq);
        if (!permissionService.isUserAllowedOn(user, updateDatasetCommand, dataset)) {
            throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "user " + user.getDisplayInfo().getTitle() + " is not authorized to modify dataset with global ID " + dataset.getGlobalId());
        }
        // -------------------------------------
        if (DataCaptureModuleUtil.rsyncSupportEnabled(settingsSvc.getValueForKey(SettingsServiceBean.Key.UploadMethods))) {
            throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, SettingsServiceBean.Key.UploadMethods + " contains " + SystemConfig.FileUploadMethods.RSYNC + ". Please use rsync file upload.");
        }
        /**
         * @todo Decide if we want non-zip files to work. Technically, now
         * that we're letting ingestService.createDataFiles unpack the zip
         * for us, the following *does* work:
         *
         * curl --data-binary @path/to/trees.png -H "Content-Disposition:
         * filename=trees.png" -H "Content-Type: image/png" -H "Packaging:
         * http://purl.org/net/sword/package/SimpleZip"
         *
         * We *might* want to continue to force API users to only upload zip
         * files so that some day we can support including a file or files
         * that contain the metadata (i.e. description) for each file in the
         * zip: https://github.com/IQSS/dataverse/issues/723
         */
        if (!deposit.getPackaging().equals(UriRegistry.PACKAGE_SIMPLE_ZIP)) {
            throw new SwordError(UriRegistry.ERROR_CONTENT, 415, "Package format " + UriRegistry.PACKAGE_SIMPLE_ZIP + " is required but format specified in 'Packaging' HTTP header was " + deposit.getPackaging());
        }
        String uploadedZipFilename = deposit.getFilename();
        DatasetVersion editVersion = dataset.getEditVersion();
        if (deposit.getInputStream() == null) {
            throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Deposit input stream was null.");
        }
        int bytesAvailableInInputStream = 0;
        try {
            bytesAvailableInInputStream = deposit.getInputStream().available();
        } catch (IOException ex) {
            throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not determine number of bytes available in input stream: " + ex);
        }
        if (bytesAvailableInInputStream == 0) {
            throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Bytes available in input stream was " + bytesAvailableInInputStream + ". Please check the file you are attempting to deposit.");
        }
        /**
         * @todo Think about whether we should instead pass in "application/zip"
         * rather than letting ingestService.createDataFiles() guess the
         * contentType by passing it "null". See also the note above about
         * SimpleZip vs. other contentTypes.
         */
        String guessContentTypeForMe = null;
        List<DataFile> dataFiles = new ArrayList<>();
        try {
            try {
                dataFiles = FileUtil.createDataFiles(editVersion, deposit.getInputStream(), uploadedZipFilename, guessContentTypeForMe, systemConfig);
            } catch (EJBException ex) {
                Throwable cause = ex.getCause();
                if (cause != null) {
                    if (cause instanceof IllegalArgumentException) {
                        /**
                         * @todo It should be safe to remove this catch of
                         * EJBException and IllegalArgumentException once
                         * this ticket is resolved:
                         *
                         * IllegalArgumentException: MALFORMED when
                         * uploading certain zip files
                         * https://github.com/IQSS/dataverse/issues/1021
                         */
                        throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Exception caught calling ingestService.createDataFiles. Problem with zip file, perhaps: " + cause);
                    } else {
                        throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Exception caught calling ingestService.createDataFiles: " + cause);
                    }
                } else {
                    throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Exception caught calling ingestService.createDataFiles. No cause: " + ex.getMessage());
                }
            }
            /* TODO: L.A. 4.6! catch (FileExceedsMaxSizeException ex) {
                throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Exception caught calling ingestService.createDataFiles: " + ex.getMessage());
                // Logger.getLogger(MediaResourceManagerImpl.class.getName()).log(Level.SEVERE, null, ex);
            } */
        } catch (IOException ex) {
            throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unable to add file(s) to dataset: " + ex.getMessage());
        }
        if (!dataFiles.isEmpty()) {
            Set<ConstraintViolation> constraintViolations = editVersion.validate();
            if (constraintViolations.size() > 0) {
                ConstraintViolation violation = constraintViolations.iterator().next();
                throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unable to add file(s) to dataset: " + violation.getMessage() + " The invalid value was \"" + violation.getInvalidValue() + "\".");
            } else {
                ingestService.addFiles(editVersion, dataFiles);
            }
        } else {
            throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "No files to add to dataset. Perhaps the zip file was empty.");
        }
        try {
            dataset = commandEngine.submit(updateDatasetCommand);
        } catch (CommandException ex) {
            throw returnEarly("Couldn't update dataset " + ex);
        } catch (EJBException ex) {
            /**
             * @todo Stop bothering to catch an EJBException once this has
             * been implemented:
             *
             * Have commands catch ConstraintViolationException and turn
             * them into something that inherits from CommandException:
             * https://github.com/IQSS/dataverse/issues/1009
             */
            Throwable cause = ex;
            StringBuilder sb = new StringBuilder();
            sb.append(ex.getLocalizedMessage());
            while (cause.getCause() != null) {
                cause = cause.getCause();
                sb.append(cause + " ");
                if (cause instanceof ConstraintViolationException) {
                    ConstraintViolationException constraintViolationException = (ConstraintViolationException) cause;
                    for (ConstraintViolation<?> violation : constraintViolationException.getConstraintViolations()) {
                        sb.append(" Invalid value \"").append(violation.getInvalidValue()).append("\" for ").append(violation.getPropertyPath()).append(" at ").append(violation.getLeafBean()).append(" - ").append(violation.getMessage());
                    }
                }
            }
            throw returnEarly("EJBException: " + sb.toString());
        }
        ingestService.startIngestJobs(dataset, user);
        ReceiptGenerator receiptGenerator = new ReceiptGenerator();
        String baseUrl = urlManager.getHostnamePlusBaseUrlPath(uri);
        DepositReceipt depositReceipt = receiptGenerator.createDatasetReceipt(baseUrl, dataset);
        return depositReceipt;
    } else {
        throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Unable to determine target type or identifier from URL: " + uri);
    }
}
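The EJBException handler above walks the exception's cause chain and expands any ConstraintViolationException into readable text. Here is a standalone sketch of that unwrapping pattern, extracted into a helper for clarity (the name describeCauseChain is ours, not Dataverse's):

import javax.validation.ConstraintViolation;
import javax.validation.ConstraintViolationException;

public class CauseChainSketch {
    // Walk the cause chain, appending each cause and, for constraint
    // violations, the offending values and messages - mirroring the
    // handler in replaceOrAddFiles above.
    static String describeCauseChain(Throwable ex) {
        StringBuilder sb = new StringBuilder(String.valueOf(ex.getLocalizedMessage()));
        Throwable cause = ex;
        while (cause.getCause() != null) {
            cause = cause.getCause();
            sb.append(' ').append(cause);
            if (cause instanceof ConstraintViolationException) {
                for (ConstraintViolation<?> v : ((ConstraintViolationException) cause).getConstraintViolations()) {
                    sb.append(" Invalid value \"").append(v.getInvalidValue())
                      .append("\" for ").append(v.getPropertyPath())
                      .append(" - ").append(v.getMessage());
                }
            }
        }
        return sb.toString();
    }

    public static void main(String[] args) {
        RuntimeException inner = new IllegalStateException("title may not be empty");
        System.out.println(describeCauseChain(new RuntimeException("wrapper", inner)));
    }
}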
Use of edu.harvard.iq.dataverse.DatasetVersion in project dataverse by IQSS.
The class CollectionDepositManagerImpl, method createNew:
@Override
public DepositReceipt createNew(String collectionUri, Deposit deposit, AuthCredentials authCredentials, SwordConfiguration config) throws SwordError, SwordServerException, SwordAuthException {
    AuthenticatedUser user = swordAuth.auth(authCredentials);
    DataverseRequest dvReq = new DataverseRequest(user, request);
    urlManager.processUrl(collectionUri);
    String dvAlias = urlManager.getTargetIdentifier();
    if (urlManager.getTargetType().equals("dataverse") && dvAlias != null) {
        logger.log(Level.FINE, "attempting deposit into this dataverse alias: {0}", dvAlias);
        Dataverse dvThatWillOwnDataset = dataverseService.findByAlias(dvAlias);
        if (dvThatWillOwnDataset != null) {
            logger.log(Level.FINE, "multipart: {0}", deposit.isMultipart());
            logger.log(Level.FINE, "binary only: {0}", deposit.isBinaryOnly());
            logger.log(Level.FINE, "entry only: {0}", deposit.isEntryOnly());
            logger.log(Level.FINE, "in progress: {0}", deposit.isInProgress());
            logger.log(Level.FINE, "metadata relevant: {0}", deposit.isMetadataRelevant());
            if (deposit.isEntryOnly()) {
                // Do a sanity check on the XML received
                try {
                    SwordEntry swordEntry = deposit.getSwordEntry();
                    logger.log(Level.FINE, "deposit XML received by createNew():\n{0}", swordEntry.toString());
                } catch (ParseException ex) {
                    throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Cannot create dataset due to malformed Atom entry: " + ex);
                }
                Dataset dataset = new Dataset();
                dataset.setOwner(dvThatWillOwnDataset);
                String nonNullDefaultIfKeyNotFound = "";
                String protocol = settingsService.getValueForKey(SettingsServiceBean.Key.Protocol, nonNullDefaultIfKeyNotFound);
                String authority = settingsService.getValueForKey(SettingsServiceBean.Key.Authority, nonNullDefaultIfKeyNotFound);
                String separator = settingsService.getValueForKey(SettingsServiceBean.Key.DoiSeparator, nonNullDefaultIfKeyNotFound);
                dataset.setProtocol(protocol);
                dataset.setAuthority(authority);
                dataset.setDoiSeparator(separator);
                // Wait until the create command before actually getting an identifier:
                // dataset.setIdentifier(datasetService.generateDatasetIdentifier(protocol, authority, separator));
                logger.log(Level.FINE, "DS Deposit identifier: {0}", dataset.getIdentifier());
                CreateDatasetCommand createDatasetCommand = new CreateDatasetCommand(dataset, dvReq, false);
                if (!permissionService.isUserAllowedOn(user, createDatasetCommand, dataset)) {
                    throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "user " + user.getDisplayInfo().getTitle() + " is not authorized to create a dataset in this dataverse.");
                }
                DatasetVersion newDatasetVersion = dataset.getEditVersion();
                String foreignFormat = SwordUtil.DCTERMS;
                try {
                    importGenericService.importXML(deposit.getSwordEntry().toString(), foreignFormat, newDatasetVersion);
                } catch (Exception ex) {
                    throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "problem calling importXML: " + ex);
                }
                swordService.addDatasetContact(newDatasetVersion, user);
                swordService.addDatasetDepositor(newDatasetVersion, user);
                swordService.addDatasetSubjectIfMissing(newDatasetVersion);
                swordService.setDatasetLicenseAndTermsOfUse(newDatasetVersion, deposit.getSwordEntry());
                Dataset createdDataset = null;
                try {
                    createdDataset = engineSvc.submit(createDatasetCommand);
                } catch (EJBException | CommandException ex) {
                    Throwable cause = ex;
                    StringBuilder sb = new StringBuilder();
                    sb.append(ex.getLocalizedMessage());
                    while (cause.getCause() != null) {
                        cause = cause.getCause();
                        /**
                         * @todo Move this ConstraintViolationException
                         * check to CreateDatasetCommand. It can be triggered
                         * if you don't call dataset.setIdentifier() or if
                         * you feed it a date format we don't like. Once this
                         * is done we should be able to drop EJBException
                         * from the catch above and only catch
                         * CommandException.
                         *
                         * See also "Have commands catch
                         * ConstraintViolationException and turn them into
                         * something that inherits from CommandException",
                         * Issue #1009, IQSS/dataverse:
                         * https://github.com/IQSS/dataverse/issues/1009
                         */
                        if (cause instanceof ConstraintViolationException) {
                            ConstraintViolationException constraintViolationException = (ConstraintViolationException) cause;
                            for (ConstraintViolation<?> violation : constraintViolationException.getConstraintViolations()) {
                                sb.append(" Invalid value: '").append(violation.getInvalidValue()).append("' for ").append(violation.getPropertyPath()).append(" at ").append(violation.getLeafBean()).append(" - ").append(violation.getMessage());
                            }
                        }
                    }
                    logger.info(sb.toString());
                    throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Couldn't create dataset: " + sb.toString());
                }
                if (createdDataset != null) {
                    ReceiptGenerator receiptGenerator = new ReceiptGenerator();
                    String baseUrl = urlManager.getHostnamePlusBaseUrlPath(collectionUri);
                    DepositReceipt depositReceipt = receiptGenerator.createDatasetReceipt(baseUrl, createdDataset);
                    return depositReceipt;
                } else {
                    throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Problem creating dataset. Null returned.");
                }
            } else if (deposit.isBinaryOnly()) {
                // curl --insecure -s --data-binary "@example.zip" -H "Content-Disposition: filename=example.zip" -H "Content-Type: application/zip" https://sword:sword@localhost:8181/dvn/api/data-deposit/v1/swordv2/collection/dataverse/sword/
                throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Binary deposit to the collection IRI via POST is not supported. Please POST an Atom entry instead.");
            } else if (deposit.isMultipart()) {
                // "Yeah, multipart is critically broken across all implementations" -- http://www.mail-archive.com/sword-app-tech@lists.sourceforge.net/msg00327.html
                throw new UnsupportedOperationException("Not yet implemented");
            } else {
                throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "expected deposit types are isEntryOnly, isBinaryOnly, and isMultiPart");
            }
        } else {
            throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not find dataverse: " + dvAlias);
        }
    } else {
        throw new SwordError(UriRegistry.ERROR_BAD_REQUEST, "Could not determine target type or identifier from URL: " + collectionUri);
    }
}
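For reference, a minimal sketch of how the protocol, authority, and separator settings read above combine into a dataset's persistent identifier once the create command assigns one; the concrete values are assumptions (typical test-installation values), not output of this code:

public class GlobalIdSketch {
    public static void main(String[] args) {
        String protocol = "doi";          // SettingsServiceBean.Key.Protocol
        String authority = "10.5072/FK2"; // SettingsServiceBean.Key.Authority
        String separator = "/";           // SettingsServiceBean.Key.DoiSeparator
        String identifier = "ABC123";     // assigned later, by the create command
        // Global id format: protocol:authority + separator + identifier
        System.out.println(protocol + ":" + authority + separator + identifier);
        // -> doi:10.5072/FK2/ABC123
    }
}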
Use of edu.harvard.iq.dataverse.DatasetVersion in project dataverse by IQSS.
The class ImportServiceBean, method doImport:
public JsonObjectBuilder doImport(DataverseRequest dataverseRequest, Dataverse owner, String xmlToParse, String fileName, ImportType importType, PrintWriter cleanupLog) throws ImportException, IOException {
    String status = "";
    Long createdId = null;
    DatasetDTO dsDTO = null;
    try {
        dsDTO = importDDIService.doImport(importType, xmlToParse);
    } catch (XMLStreamException e) {
        throw new ImportException("XMLStreamException: " + e);
    }
    // Convert the DTO to JSON,
    Gson gson = new GsonBuilder().setPrettyPrinting().create();
    String json = gson.toJson(dsDTO);
    JsonReader jsonReader = Json.createReader(new StringReader(json));
    JsonObject obj = jsonReader.readObject();
    // and call parseDataset to read it into a Dataset.
    try {
        JsonParser parser = new JsonParser(datasetfieldService, metadataBlockService, settingsService);
        parser.setLenient(!importType.equals(ImportType.NEW));
        Dataset ds = parser.parseDataset(obj);
        // For a new import, if the dataset carries a global id whose protocol
        // is not one we support, it will be rejected.
        if (importType.equals(ImportType.NEW)) {
            if (ds.getGlobalId() != null && !ds.getProtocol().equals(settingsService.getValueForKey(SettingsServiceBean.Key.Protocol, ""))) {
                throw new ImportException("Could not register id " + ds.getGlobalId() + ", protocol not supported");
            }
        }
        ds.setOwner(owner);
        ds.getLatestVersion().setDatasetFields(ds.getLatestVersion().initDatasetFields());
        // Check data against required constraints
        List<ConstraintViolation<DatasetField>> violations = ds.getVersions().get(0).validateRequired();
        if (!violations.isEmpty()) {
            if (importType.equals(ImportType.MIGRATION) || importType.equals(ImportType.HARVEST)) {
                // For migration and harvest, add NA for missing required values
                for (ConstraintViolation<DatasetField> v : violations) {
                    DatasetField f = v.getRootBean();
                    f.setSingleValue(DatasetField.NA_VALUE);
                }
            } else {
                // When importing a new dataset, the import will fail
                // if required values are missing.
                String errMsg = "Error importing data:";
                for (ConstraintViolation<DatasetField> v : violations) {
                    errMsg += " " + v.getMessage();
                }
                throw new ImportException(errMsg);
            }
        }
        // Check data against validation constraints.
        // If we are migrating and "scrub migration data" is true, we attempt to fix invalid data;
        // if the fix fails, stop processing this file by throwing an exception.
        Set<ConstraintViolation> invalidViolations = ds.getVersions().get(0).validate();
        ValidatorFactory factory = Validation.buildDefaultValidatorFactory();
        Validator validator = factory.getValidator();
        if (!invalidViolations.isEmpty()) {
            for (ConstraintViolation<DatasetFieldValue> v : invalidViolations) {
                DatasetFieldValue f = v.getRootBean();
                boolean fixed = false;
                boolean converted = false;
                if ((importType.equals(ImportType.MIGRATION) || importType.equals(ImportType.HARVEST)) && settingsService.isTrueForKey(SettingsServiceBean.Key.ScrubMigrationData, false)) {
                    fixed = processMigrationValidationError(f, cleanupLog, fileName);
                    converted = true;
                    if (fixed) {
                        Set<ConstraintViolation<DatasetFieldValue>> scrubbedViolations = validator.validate(f);
                        if (!scrubbedViolations.isEmpty()) {
                            fixed = false;
                        }
                    }
                }
                if (!fixed) {
                    if (importType.equals(ImportType.HARVEST)) {
                        String msg = "Data modified - File: " + fileName + "; Field: " + f.getDatasetField().getDatasetFieldType().getDisplayName() + "; " + "Invalid value: '" + f.getValue() + "'" + " Converted Value:'" + DatasetField.NA_VALUE + "'";
                        cleanupLog.println(msg);
                        f.setValue(DatasetField.NA_VALUE);
                    } else {
                        String msg = " Validation error for ";
                        if (converted) {
                            msg += "converted ";
                        }
                        msg += "value: " + f.getValue() + ", " + f.getValidationMessage();
                        throw new ImportException(msg);
                    }
                }
            }
        }
        Dataset existingDs = datasetService.findByGlobalId(ds.getGlobalId());
        if (existingDs != null) {
            if (importType.equals(ImportType.HARVEST)) {
                // We will replace the current version with the imported version.
                if (existingDs.getVersions().size() != 1) {
                    throw new ImportException("Error importing Harvested Dataset, existing dataset has " + existingDs.getVersions().size() + " versions");
                }
                engineSvc.submit(new DestroyDatasetCommand(existingDs, dataverseRequest));
                Dataset managedDs = engineSvc.submit(new CreateDatasetCommand(ds, dataverseRequest, false, importType));
                status = " updated dataset, id=" + managedDs.getId() + ".";
            } else {
                // Check that the version number isn't already in the dataset
                for (DatasetVersion dsv : existingDs.getVersions()) {
                    if (dsv.getVersionNumber().equals(ds.getLatestVersion().getVersionNumber())) {
                        throw new ImportException("VersionNumber " + ds.getLatestVersion().getVersionNumber() + " already exists in dataset " + existingDs.getGlobalId());
                    }
                }
                DatasetVersion dsv = engineSvc.submit(new CreateDatasetVersionCommand(dataverseRequest, existingDs, ds.getVersions().get(0)));
                status = " created datasetVersion, for dataset " + dsv.getDataset().getGlobalId();
                createdId = dsv.getId();
            }
        } else {
            Dataset managedDs = engineSvc.submit(new CreateDatasetCommand(ds, dataverseRequest, false, importType));
            status = " created dataset, id=" + managedDs.getId() + ".";
            createdId = managedDs.getId();
        }
    } catch (JsonParseException ex) {
        logger.log(Level.INFO, "Error parsing datasetVersion: {0}", ex.getMessage());
        throw new ImportException("Error parsing datasetVersion: " + ex.getMessage(), ex);
    } catch (CommandException ex) {
        logger.log(Level.INFO, "Error executing create dataset command: {0}", ex.getMessage());
        throw new ImportException("Error executing dataverse command: " + ex.getMessage(), ex);
    }
    return Json.createObjectBuilder().add("message", status);
}
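The method bridges two JSON stacks: Gson serializes the DatasetDTO to a string, and javax.json re-parses that string so JsonParser can consume it. A minimal sketch of the same round trip with a stand-in DTO, since DatasetDTO itself is not shown in this excerpt:

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import javax.json.Json;
import javax.json.JsonObject;
import java.io.StringReader;

public class DtoRoundTripSketch {
    // Stand-in for DatasetDTO; a hypothetical single-field DTO.
    static class ExampleDto {
        String title = "Example Dataset";
    }

    public static void main(String[] args) {
        Gson gson = new GsonBuilder().setPrettyPrinting().create();
        String json = gson.toJson(new ExampleDto()); // DTO -> JSON string
        JsonObject obj = Json.createReader(new StringReader(json)).readObject(); // string -> javax.json
        System.out.println(obj.getString("title")); // Example Dataset
    }
}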