Use of edu.harvard.iq.dataverse.Dataverse in the dataverse project by IQSS.
From class CreateRoleCommandTest, method testNonSuperUsersCantAddRoles:
/**
 * Verifies that a user who is not a superuser is rejected when attempting to
 * create a role: submitting the command must throw IllegalCommandException.
 */
@Test(expected = IllegalCommandException.class)
public void testNonSuperUsersCantAddRoles() throws CommandException {
    // Assemble a role owned by a mock dataverse.
    DataverseRole role = new DataverseRole();
    role.setAlias("roleTest");
    role.setName("Tester Role");
    role.addPermission(Permission.AddDataset);
    Dataverse owner = MocksFactory.makeDataverse();
    role.setOwner(owner);

    // The requesting user is explicitly NOT a superuser.
    AuthenticatedUser regularUser = new AuthenticatedUser();
    regularUser.setSuperuser(false);
    DataverseRequest request = new DataverseRequest(regularUser, IpAddress.valueOf("89.17.33.33"));

    // Expected to throw: non-superusers may not add roles.
    engine.submit(new CreateRoleCommand(role, request, owner));
}
Use of edu.harvard.iq.dataverse.Dataverse in the dataverse project by IQSS.
From class CreateRoleCommandTest, method testSuperUsersAddRoles:
/**
 * Verifies that a superuser can create a role, and that the command persists
 * the new role (i.e. the engine's save hook is invoked).
 */
@Test
public void testSuperUsersAddRoles() throws CommandException {
    // Assemble a role owned by a mock dataverse.
    DataverseRole role = new DataverseRole();
    role.setAlias("roleTest");
    role.setName("Tester Role");
    role.addPermission(Permission.AddDataset);
    Dataverse owner = MocksFactory.makeDataverse();
    role.setOwner(owner);

    // The requesting user IS a superuser, so the command must succeed.
    AuthenticatedUser admin = new AuthenticatedUser();
    admin.setSuperuser(true);
    DataverseRequest request = new DataverseRequest(admin, IpAddress.valueOf("89.17.33.33"));

    engine.submit(new CreateRoleCommand(role, request, owner));

    // The mock engine records whether save() was reached.
    assertTrue("CreateRoleCommand did not call save on the created role.", saveCalled);
}
Use of edu.harvard.iq.dataverse.Dataverse in the dataverse project by IQSS.
From class FileAccessIO, method getDatasetDirectory:
/**
 * Resolves the filesystem directory of the dataset that owns this access
 * object's DvObject.
 *
 * @return the dataset directory as a string
 * @throws IOException if no DvObject is set, the DvObject is a Dataverse
 *         (unsupported for file access), the parent dataset's directory
 *         cannot be determined, or no storage identifier is defined
 */
private String getDatasetDirectory() throws IOException {
    if (dvObject == null) {
        throw new IOException("No DvObject defined in the Data Access Object");
    }

    // A Dataset carries its own directory; a DataFile delegates to its owner.
    final Path directoryPath;
    if (dvObject instanceof Dataset) {
        directoryPath = this.getDataset().getFileSystemDirectory();
    } else if (dvObject instanceof DataFile) {
        directoryPath = this.getDataFile().getOwner().getFileSystemDirectory();
    } else if (dvObject instanceof Dataverse) {
        throw new IOException("FileAccessIO: Dataverses are not a supported dvObject");
    } else {
        directoryPath = null;
    }

    if (directoryPath == null) {
        throw new IOException("Could not determine the filesystem directory of the parent dataset.");
    }

    // A local storage identifier is required to address the file on disk.
    String storageId = dvObject.getStorageIdentifier();
    if (storageId == null || storageId.isEmpty()) {
        throw new IOException("Data Access: No local storage identifier defined for this datafile.");
    }
    return directoryPath.toString();
}
Use of edu.harvard.iq.dataverse.Dataverse in the dataverse project by IQSS.
From class S3AccessIO, method open:
// Opens this access object against S3 for either read or write access,
// depending on the requested options. For a DataFile in read mode this
// fetches the S3 object and wires up the input stream/channel/size; in write
// mode it only computes (and, if needed, assigns) the object key and storage
// identifier. For a Dataset it computes and stores the storage identifier.
// Dataverses are not supported by this driver.
@Override
public void open(DataAccessOption... options) throws IOException {
    // Fail fast if the S3 client or target bucket is not available.
    if (s3 == null) {
        throw new IOException("ERROR: s3 not initialised. ");
    }
    if (bucketName == null || !s3.doesBucketExist(bucketName)) {
        throw new IOException("ERROR: S3AccessIO - You must create and configure a bucket before creating datasets.");
    }
    DataAccessRequest req = this.getRequest();
    // The two access flags are kept mutually exclusive.
    if (isWriteAccessRequested(options)) {
        isWriteAccess = true;
        isReadAccess = false;
    } else {
        isWriteAccess = false;
        isReadAccess = true;
    }
    if (dvObject instanceof DataFile) {
        String storageIdentifier = dvObject.getStorageIdentifier();
        DataFile dataFile = this.getDataFile();
        // "noVarHeader" request parameter suppresses the generated
        // tab-separated variable header for tabular files (see read branch).
        if (req != null && req.getParameter("noVarHeader") != null) {
            this.setNoVarHeader(true);
        }
        if (storageIdentifier == null || "".equals(storageIdentifier)) {
            throw new FileNotFoundException("Data Access: No local storage identifier defined for this datafile.");
        }
        if (isReadAccess) {
            // Read mode: download the object and expose its content stream.
            key = getMainFileKey();
            S3Object s3object = s3.getObject(new GetObjectRequest(bucketName, key));
            InputStream in = s3object.getObjectContent();
            if (in == null) {
                throw new IOException("Cannot get Object" + key);
            }
            this.setInputStream(in);
            setChannel(Channels.newChannel(in));
            this.setSize(s3object.getObjectMetadata().getContentLength());
            // For tabular TSV files, prepend a generated variable-name header
            // unless the caller asked for it to be suppressed above.
            if (dataFile.getContentType() != null && dataFile.getContentType().equals("text/tab-separated-values") && dataFile.isTabularData() && dataFile.getDataTable() != null && (!this.noVarHeader())) {
                List<DataVariable> datavariables = dataFile.getDataTable().getDataVariables();
                String varHeaderLine = generateVariableHeader(datavariables);
                this.setVarHeader(varHeaderLine);
            }
        } else if (isWriteAccess) {
            // Write mode: derive the S3 key from the owning dataset's
            // authority/identifier plus the file's storage identifier.
            key = dataFile.getOwner().getAuthority() + "/" + this.getDataFile().getOwner().getIdentifier();
            if (storageIdentifier.startsWith(S3_IDENTIFIER_PREFIX + "://")) {
                // Identifier already carries the s3 prefix and bucket; keep
                // only the trailing object-id part after the last ':'.
                key += "/" + storageIdentifier.substring(storageIdentifier.lastIndexOf(":") + 1);
            } else {
                // Bare identifier: build the key and upgrade the stored
                // identifier to the fully-qualified s3 form.
                key += "/" + storageIdentifier;
                dvObject.setStorageIdentifier(S3_IDENTIFIER_PREFIX + "://" + bucketName + ":" + storageIdentifier);
            }
        }
        this.setMimeType(dataFile.getContentType());
        try {
            this.setFileName(dataFile.getFileMetadata().getLabel());
        } catch (Exception ex) {
            // Missing metadata/label is tolerated; fall back to a placeholder.
            this.setFileName("unknown");
        }
    } else if (dvObject instanceof Dataset) {
        // Datasets only need their key/storage identifier established.
        Dataset dataset = this.getDataset();
        key = dataset.getAuthority() + "/" + dataset.getIdentifier();
        dataset.setStorageIdentifier(S3_IDENTIFIER_PREFIX + "://" + key);
    } else if (dvObject instanceof Dataverse) {
        throw new IOException("Data Access: Invalid DvObject type : Dataverse");
    } else {
        throw new IOException("Data Access: Invalid DvObject type");
    }
}
Use of edu.harvard.iq.dataverse.Dataverse in the dataverse project by IQSS.
From class IndexServiceBean, method indexDataverse:
/**
 * Indexes the given dataverse into Solr as a single document and then indexes
 * its permission document.
 *
 * The root dataverse is never indexed; a dataverse with a null id cannot be
 * indexed. Both conditions short-circuit with an explanatory status message.
 *
 * @param dataverse the dataverse to index
 * @return an AsyncResult wrapping a human-readable status message
 */
public Future<String> indexDataverse(Dataverse dataverse) {
    logger.fine("indexDataverse called on dataverse id " + dataverse.getId() + "(" + dataverse.getAlias() + ")");
    if (dataverse.getId() == null) {
        String msg = "unable to index dataverse. id was null (alias: " + dataverse.getAlias() + ")";
        logger.info(msg);
        return new AsyncResult<>(msg);
    }
    Dataverse rootDataverse = findRootDataverseCached();
    if (rootDataverse == null) {
        String msg = "Could not find root dataverse and the root dataverse should not be indexed. Returning.";
        return new AsyncResult<>(msg);
    } else if (dataverse.getId().equals(rootDataverse.getId())) {
        // BUG FIX: ids are boxed Longs; the previous '==' compared object
        // references and only matched for ids inside the Long autobox cache
        // (-128..127), so the root-dataverse guard silently failed otherwise.
        String msg = "The root dataverse should not be indexed. Returning.";
        return new AsyncResult<>(msg);
    }
    Collection<SolrInputDocument> docs = new ArrayList<>();
    SolrInputDocument solrInputDocument = new SolrInputDocument();
    solrInputDocument.addField(SearchFields.ID, solrDocIdentifierDataverse + dataverse.getId());
    solrInputDocument.addField(SearchFields.ENTITY_ID, dataverse.getId());
    solrInputDocument.addField(SearchFields.DATAVERSE_VERSION_INDEXED_BY, systemConfig.getVersion());
    solrInputDocument.addField(SearchFields.IDENTIFIER, dataverse.getAlias());
    solrInputDocument.addField(SearchFields.TYPE, "dataverses");
    solrInputDocument.addField(SearchFields.NAME, dataverse.getName());
    solrInputDocument.addField(SearchFields.NAME_SORT, dataverse.getName());
    solrInputDocument.addField(SearchFields.DATAVERSE_NAME, dataverse.getName());
    solrInputDocument.addField(SearchFields.DATAVERSE_CATEGORY, dataverse.getIndexableCategoryName());
    // Released dataverses are dated by publication, unpublished by creation.
    if (dataverse.isReleased()) {
        solrInputDocument.addField(SearchFields.PUBLICATION_STATUS, PUBLISHED_STRING);
        solrInputDocument.addField(SearchFields.RELEASE_OR_CREATE_DATE, dataverse.getPublicationDate());
        solrInputDocument.addField(SearchFields.RELEASE_OR_CREATE_DATE_SEARCHABLE_TEXT, convertToFriendlyDate(dataverse.getPublicationDate()));
    } else {
        solrInputDocument.addField(SearchFields.PUBLICATION_STATUS, UNPUBLISHED_STRING);
        solrInputDocument.addField(SearchFields.RELEASE_OR_CREATE_DATE, dataverse.getCreateDate());
        solrInputDocument.addField(SearchFields.RELEASE_OR_CREATE_DATE_SEARCHABLE_TEXT, convertToFriendlyDate(dataverse.getCreateDate()));
    }
    /* We don't really have harvested dataverses yet;
       (I have in fact just removed the isHarvested() method from the Dataverse object) -- L.A.
       So all dataverses are currently "local". */
    solrInputDocument.addField(SearchFields.IS_HARVESTED, false);
    // Reuse the cached root fetched above instead of re-querying it.
    solrInputDocument.addField(SearchFields.METADATA_SOURCE, rootDataverse.getName());
    addDataverseReleaseDateToSolrDoc(solrInputDocument, dataverse);
    solrInputDocument.addField(SearchFields.DESCRIPTION, StringUtil.html2text(dataverse.getDescription()));
    solrInputDocument.addField(SearchFields.DATAVERSE_DESCRIPTION, StringUtil.html2text(dataverse.getDescription()));
    if (dataverse.getAffiliation() != null && !dataverse.getAffiliation().isEmpty()) {
        /**
         * @todo: stop using affiliation as category
         */
        solrInputDocument.addField(SearchFields.AFFILIATION, dataverse.getAffiliation());
        solrInputDocument.addField(SearchFields.DATAVERSE_AFFILIATION, dataverse.getAffiliation());
    }
    for (ControlledVocabularyValue dataverseSubject : dataverse.getDataverseSubjects()) {
        String subject = dataverseSubject.getStrValue();
        if (!subject.equals(DatasetField.NA_VALUE)) {
            solrInputDocument.addField(SearchFields.DATAVERSE_SUBJECT, subject);
            // collapse into shared "subject" field used as a facet
            solrInputDocument.addField(SearchFields.SUBJECT, subject);
        }
    }
    // rootDataverse is known non-null here (checked above); the root itself
    // never reaches this point, and only the root has a null owner.
    if (!dataverse.equals(rootDataverse)) {
        if (dataverse.getOwner() != null) {
            solrInputDocument.addField(SearchFields.PARENT_ID, dataverse.getOwner().getId());
            solrInputDocument.addField(SearchFields.PARENT_NAME, dataverse.getOwner().getName());
        }
    }
    List<String> dataversePathSegmentsAccumulator = new ArrayList<>();
    List<String> dataverseSegments = findPathSegments(dataverse, dataversePathSegmentsAccumulator);
    List<String> dataversePaths = getDataversePathsFromSegments(dataverseSegments);
    if (dataversePaths.size() > 0) {
        // don't show yourself while indexing or in search results: https://redmine.hmdc.harvard.edu/issues/3613
        dataversePaths.remove(dataversePaths.size() - 1);
    }
    // Add paths for linking dataverses so this dataverse is discoverable
    // under every tree that links to it.
    for (Dataverse linkingDataverse : dvLinkingService.findLinkingDataverses(dataverse.getId())) {
        List<String> linkingDataversePathSegmentsAccumulator = new ArrayList<>();
        List<String> linkingdataverseSegments = findPathSegments(linkingDataverse, linkingDataversePathSegmentsAccumulator);
        List<String> linkingDataversePaths = getDataversePathsFromSegments(linkingdataverseSegments);
        for (String dvPath : linkingDataversePaths) {
            dataversePaths.add(dvPath);
        }
    }
    solrInputDocument.addField(SearchFields.SUBTREE, dataversePaths);
    docs.add(solrInputDocument);
    String status;
    try {
        // id non-nullness was validated at the top of this method.
        solrServer.add(docs);
    } catch (SolrServerException | IOException ex) {
        status = ex.toString();
        logger.info(status);
        return new AsyncResult<>(status);
    }
    try {
        solrServer.commit();
    } catch (SolrServerException | IOException ex) {
        status = ex.toString();
        logger.info(status);
        return new AsyncResult<>(status);
    }
    // Record the index time and push the matching permission document.
    dvObjectService.updateContentIndexTime(dataverse);
    IndexResponse indexResponse = solrIndexService.indexPermissionsForOneDvObject(dataverse);
    String msg = "indexed dataverse " + dataverse.getId() + ":" + dataverse.getAlias() + ". Response from permission indexing: " + indexResponse.getMessage();
    return new AsyncResult<>(msg);
}
Aggregations