Use of edu.harvard.iq.dataverse.Dataverse in the IQSS/dataverse project: the Access class, method dvCardImage.
@Path("dvCardImage/{dataverseId}")
@GET
@Produces({ "image/png" })
/**
 * Returns a PNG thumbnail ("card image") for the given dataverse, generated
 * from the dataverse theme's logo file.
 *
 * @param dataverseId database id of the dataverse whose card image is requested
 * @return an open InputStream positioned at the start of the 48px PNG
 *         thumbnail, or {@code null} when the dataverse does not exist, has no
 *         themed logo, or thumbnail generation fails (JAX-RS renders a null
 *         entity as an empty 204/200 response)
 */
public InputStream dvCardImage(@PathParam("dataverseId") Long dataverseId, @Context UriInfo uriInfo, @Context HttpHeaders headers, @Context HttpServletResponse response) /*throws NotFoundException, ServiceUnavailableException, PermissionDeniedException, AuthorizationRequiredException*/
{
    logger.fine("entering dvCardImage");
    Dataverse dataverse = dataverseService.find(dataverseId);
    if (dataverse == null) {
        // Fixed: the previous message incorrectly claimed a DatasetVersion lookup failed;
        // this endpoint looks up a Dataverse.
        logger.warning("dvCardImage: could not locate a Dataverse object for id " + dataverseId + "!");
        return null;
    }
    // Only dataverses with a theme that specifies a logo can have a card image.
    if (dataverse.getDataverseTheme() != null && dataverse.getDataverseTheme().getLogo() != null && !dataverse.getDataverseTheme().getLogo().equals("")) {
        File dataverseLogoFile = getLogo(dataverse);
        if (dataverseLogoFile != null) {
            logger.fine("dvCardImage: logo file found");
            String logoThumbNailPath = null;
            InputStream in = null;
            try {
                if (dataverseLogoFile.exists()) {
                    // 48px is the card-image display size.
                    logoThumbNailPath = ImageThumbConverter.generateImageThumbnailFromFile(dataverseLogoFile.getAbsolutePath(), 48);
                    if (logoThumbNailPath != null) {
                        in = new FileInputStream(logoThumbNailPath);
                    }
                }
            } catch (Exception ex) {
                // Best-effort: a missing/unreadable thumbnail must not fail the request,
                // but the cause should at least be visible in the fine log (previously
                // the exception was silently swallowed).
                logger.fine("dvCardImage: failed to open thumbnail for dataverse " + dataverseId + ": " + ex);
                in = null;
            }
            if (in != null) {
                logger.fine("dvCardImage: successfully obtained thumbnail for dataverse logo.");
                return in;
            }
        }
    }
    // No theme logo (or thumbnail generation failed): no card image.
    // (A commented-out fallback that searched child datasets for a thumbnail
    // was dead code and has been removed.)
    return null;
}
Use of edu.harvard.iq.dataverse.Dataverse in the IQSS/dataverse project: the JsonParser class, method parseDataverse.
/**
 * Builds a {@link Dataverse} entity from its JSON representation.
 *
 * Required keys: "alias", "name". Optional keys: "description",
 * "permissionRoot" (default false), "facetRoot" (default false),
 * "affiliation", "dataverseContacts" (each entry must carry a mandatory
 * "contactEmail"), "theme", and "dataverseType" (defaults to UNCATEGORIZED;
 * unrecognized values are silently ignored and the default is kept).
 *
 * @param jobj the JSON object describing the dataverse
 * @return a new, unmanaged Dataverse populated from {@code jobj}
 * @throws JsonParseException if a mandatory key is missing
 */
public Dataverse parseDataverse(JsonObject jobj) throws JsonParseException {
    Dataverse dv = new Dataverse();
    /**
     * @todo Instead of this getMandatoryString method we should run the
     * String through ConstraintValidator. See EMailValidatorTest and
     * EMailValidator for examples. That way we can check not only if it's
     * required or not but other bean validation rules such as "must match
     * this regex".
     */
    dv.setAlias(getMandatoryString(jobj, "alias"));
    dv.setName(getMandatoryString(jobj, "name"));
    dv.setDescription(jobj.getString("description", null));
    dv.setPermissionRoot(jobj.getBoolean("permissionRoot", false));
    dv.setFacetRoot(jobj.getBoolean("facetRoot", false));
    dv.setAffiliation(jobj.getString("affiliation", null));
    if (jobj.containsKey("dataverseContacts")) {
        JsonArray dvContacts = jobj.getJsonArray("dataverseContacts");
        int i = 0;
        List<DataverseContact> dvContactList = new LinkedList<>();
        for (JsonValue jsv : dvContacts) {
            DataverseContact dvc = new DataverseContact(dv);
            dvc.setContactEmail(getMandatoryString((JsonObject) jsv, "contactEmail"));
            // Display order follows the order of appearance in the JSON array.
            dvc.setDisplayOrder(i++);
            dvContactList.add(dvc);
        }
        dv.setDataverseContacts(dvContactList);
    }
    if (jobj.containsKey("theme")) {
        DataverseTheme theme = parseDataverseTheme(jobj.getJsonObject("theme"));
        // Wire both sides of the bidirectional association.
        dv.setDataverseTheme(theme);
        theme.setDataverse(dv);
    }
    // default
    dv.setDataverseType(Dataverse.DataverseType.UNCATEGORIZED);
    if (jobj.containsKey("dataverseType")) {
        // Hoisted out of the loop (was re-read on every iteration) and added
        // a break once the matching enum constant is found.
        String requestedType = jobj.getString("dataverseType");
        for (Dataverse.DataverseType dvtype : Dataverse.DataverseType.values()) {
            if (dvtype.name().equals(requestedType)) {
                dv.setDataverseType(dvtype);
                break;
            }
        }
    }
    return dv;
}
Use of edu.harvard.iq.dataverse.Dataverse in the IQSS/dataverse project: the JsonPrinter class, method getRootDataverseNameforCitation.
/**
 * Returns the name of the root dataverse that ultimately owns the given
 * dataset, for use in citations. Returns the empty string when the root
 * dataverse has no (non-empty) name.
 */
private static String getRootDataverseNameforCitation(Dataset dataset) {
    // Climb the ownership chain until we reach the dataverse with no owner.
    Dataverse current = dataset.getOwner();
    for (Dataverse parent = current.getOwner(); parent != null; parent = parent.getOwner()) {
        current = parent;
    }
    String name = current.getName();
    return StringUtil.isEmpty(name) ? "" : name;
}
Use of edu.harvard.iq.dataverse.Dataverse in the IQSS/dataverse project: the SolrIndexServiceBean class, method indexPermissionsOnSelfAndChildren.
/**
 * We use the database to determine direct children since there is no
 * inheritance.
 *
 * Reindexes Solr permission documents for {@code definitionPoint} and its
 * direct children: for a dataverse, its directly-owned published datasets and
 * their files; for a dataset, the dataset itself and its files; for anything
 * else, just the object itself. Also stamps the permission-index time on each
 * reindexed DvObject.
 *
 * @param definitionPoint the DvObject whose permissions changed
 * @return a human-readable summary of how many DvObject permission docs were
 *         indexed and whether the timestamp updates succeeded
 */
public IndexResponse indexPermissionsOnSelfAndChildren(DvObject definitionPoint) {
    List<DvObject> dvObjectsToReindexPermissionsFor = new ArrayList<>();
    List<DataFile> filesToReindexAsBatch = new ArrayList<>();
    // so don't create a Solr "permission" doc either.
    if (definitionPoint.isInstanceofDataverse()) {
        Dataverse selfDataverse = (Dataverse) definitionPoint;
        // The root dataverse itself is excluded from permission indexing.
        if (!selfDataverse.equals(dataverseService.findRootDataverse())) {
            dvObjectsToReindexPermissionsFor.add(definitionPoint);
        }
        List<Dataset> directChildDatasetsOfDvDefPoint = datasetService.findByOwnerId(selfDataverse.getId());
        for (Dataset dataset : directChildDatasetsOfDvDefPoint) {
            dvObjectsToReindexPermissionsFor.add(dataset);
            filesToReindexAsBatch.addAll(filesToReIndexPermissionsFor(dataset));
        }
    } else if (definitionPoint.isInstanceofDataset()) {
        // index the dataset itself
        // NOTE(review): the dataset is also added to the list below, so
        // indexPermissionsForOneDvObject runs twice for it — presumably
        // harmless (idempotent) but worth confirming.
        indexPermissionsForOneDvObject(definitionPoint);
        dvObjectsToReindexPermissionsFor.add(definitionPoint);
        // index files
        Dataset dataset = (Dataset) definitionPoint;
        filesToReindexAsBatch.addAll(filesToReIndexPermissionsFor(dataset));
    } else {
        dvObjectsToReindexPermissionsFor.add(definitionPoint);
    }
    /**
     * @todo Error handling? What to do with response?
     *
     * @todo Should update timestamps, probably, even thought these are
     * files, see https://github.com/IQSS/dataverse/issues/2421
     */
    String response = reindexFilesInBatches(filesToReindexAsBatch);
    List<String> updatePermissionTimeSuccessStatus = new ArrayList<>();
    for (DvObject dvObject : dvObjectsToReindexPermissionsFor) {
        /**
         * @todo do something with this response
         */
        IndexResponse indexResponse = indexPermissionsForOneDvObject(dvObject);
        // Fixed: previously this passed definitionPoint, stamping the
        // definition point's timestamp once per loop iteration while the
        // per-dvObject status list below claimed each child was updated.
        DvObject managedDvObject = dvObjectService.updatePermissionIndexTime(dvObject);
        boolean updatePermissionTimeSuccessful = (managedDvObject != null);
        updatePermissionTimeSuccessStatus.add(dvObject + ":" + updatePermissionTimeSuccessful);
    }
    return new IndexResponse("Number of dvObject permissions indexed for " + definitionPoint + " (updatePermissionTimeSuccessful:" + updatePermissionTimeSuccessStatus + "): " + dvObjectsToReindexPermissionsFor.size());
}
Use of edu.harvard.iq.dataverse.Dataverse in the IQSS/dataverse project: the IndexAllServiceBean class, method indexAllOrSubset.
/**
 * Asynchronously (re)indexes dataverses and datasets into Solr.
 *
 * Dataverses are indexed from the full list of ids needing indexing (no
 * partition support for dataverses in this branch); datasets honor the
 * partitioning parameters. Individual failures are logged and counted but do
 * not stop the run.
 *
 * @param numPartitions total number of partitions the dataset id space is split into
 * @param partitionId   which partition this invocation should process
 * @param skipIndexed   when true, skip objects that already have an index timestamp
 * @return an AsyncResult wrapping a human-readable status summary
 */
public Future<String> indexAllOrSubset(long numPartitions, long partitionId, boolean skipIndexed) {
    long indexAllTimeBegin = System.currentTimeMillis();
    String status;
    String resultOfClearingIndexTimes;
    /**
     * @todo Should we allow sysadmins to request that the Solr index and
     * related timestamps in the database be cleared as part of "index all"?
     * If so, we can make this boolean a parameter that's passed into this
     * method. A method to do this clearing has been added as a separate API
     * endpoint.
     */
    boolean clearSolrAndTimestamps = false;
    /**
     * We only allow clearing of Solr and database index timestamps if we
     * are operating on the entire index ("index all") and if we are not
     * running in "continue" mode.
     */
    if (numPartitions == 1 && !skipIndexed && clearSolrAndTimestamps) {
        logger.info("attempting to delete all Solr documents before a complete re-index");
        try {
            JsonObject response = solrIndexService.deleteAllFromSolrAndResetIndexTimes().build();
            String message = response.getString(SolrIndexServiceBean.messageString);
            int numRowsCleared = response.getInt(SolrIndexServiceBean.numRowsClearedByClearAllIndexTimes);
            resultOfClearingIndexTimes = message + " Database rows from which index timestamps were cleared: " + numRowsCleared;
        } catch (SolrServerException | IOException ex) {
            resultOfClearingIndexTimes = "Solr index and database timestamps were not cleared: " + ex;
        }
    } else {
        resultOfClearingIndexTimes = "Solr index was not cleared before indexing.";
    }
    // Note: no support for "partitions" in this experimental branch.
    // The method below returns the ids of all the unindexed dataverses.
    // (Fixed a redundant double assignment: "dataverseIds = dataverseIds = ...".)
    List<Long> dataverseIds = dataverseService.findDataverseIdsForIndexing(skipIndexed);
    int dataverseIndexCount = 0;
    int dataverseFailureCount = 0;
    for (Long id : dataverseIds) {
        try {
            dataverseIndexCount++;
            Dataverse dataverse = dataverseService.find(id);
            logger.info("indexing dataverse " + dataverseIndexCount + " of " + dataverseIds.size() + " (id=" + id + ", persistentId=" + dataverse.getAlias() + ")");
            // The Future result is intentionally not awaited; indexing proceeds
            // asynchronously in its own transaction.
            indexService.indexDataverseInNewTransaction(dataverse);
        } catch (Exception e) {
            // We want to keep running even after an exception so throw some more info into the log
            dataverseFailureCount++;
            logger.info("FAILURE indexing dataverse " + dataverseIndexCount + " of " + dataverseIds.size() + " (id=" + id + ") Exception info: " + e.getMessage());
        }
    }
    int datasetIndexCount = 0;
    int datasetFailureCount = 0;
    List<Long> datasetIds = datasetService.findAllOrSubset(numPartitions, partitionId, skipIndexed);
    for (Long id : datasetIds) {
        try {
            datasetIndexCount++;
            logger.info("indexing dataset " + datasetIndexCount + " of " + datasetIds.size() + " (id=" + id + ")");
            // As above, the Future is fire-and-forget.
            indexService.indexDatasetInNewTransaction(id);
        } catch (Exception e) {
            // We want to keep running even after an exception so throw some more info into the log
            datasetFailureCount++;
            logger.info("FAILURE indexing dataset " + datasetIndexCount + " of " + datasetIds.size() + " (id=" + id + ") Exception info: " + e.getMessage());
        }
    }
    logger.info("done iterating through all datasets");
    long indexAllTimeEnd = System.currentTimeMillis();
    String timeElapsed = "index all took " + (indexAllTimeEnd - indexAllTimeBegin) + " milliseconds";
    logger.info(timeElapsed);
    if (datasetFailureCount + dataverseFailureCount > 0) {
        String failureMessage = "There were index failures. " + dataverseFailureCount + " dataverse(s) and " + datasetFailureCount + " dataset(s) failed to index. Please check the log for more information.";
        logger.info(failureMessage);
    }
    status = dataverseIndexCount + " dataverses and " + datasetIndexCount + " datasets indexed. " + timeElapsed + ". " + resultOfClearingIndexTimes + "\n";
    logger.info(status);
    return new AsyncResult<>(status);
}
Aggregations