Use of edu.harvard.iq.dataverse.Dataset in the IQSS dataverse project: class XgetRecordHandler, method createRecord.
/**
 * Builds an OAI-PMH record (header plus metadata) for the given item, in the
 * metadata format requested by the harvester.
 *
 * @param parameters the compiled OAI request; supplies the metadata prefix
 * @param item       the harvested item; expected to be an {@code Xitem}
 * @return an {@code Xrecord} carrying the item's header, set memberships and metadata
 * @throws CannotDisseminateFormatException if the requested metadata prefix is not supported
 */
private Xrecord createRecord(OAICompiledRequest parameters, Item item) throws BadArgumentException, CannotDisseminateRecordException, OAIException, NoMetadataFormatsException, CannotDisseminateFormatException {
    // Validates the requested prefix (throws CannotDisseminateFormatException for
    // unknown formats). The returned MetadataFormat was previously stored in an
    // unused local; only the validating call is needed.
    getContext().formatForPrefix(parameters.getMetadataPrefix());

    Dataset dataset = ((Xitem) item).getDataset();
    Xrecord xrecord = new Xrecord()
            .withFormatName(parameters.getMetadataPrefix())
            .withDataset(dataset);

    Header header = new Header();
    header.withIdentifier(item.getIdentifier());
    header.withDatestamp(item.getDatestamp());

    // Advertise every OAI set this item belongs to.
    ItemHelper itemHelperWrap = new ItemHelper(item);
    for (Set set : itemHelperWrap.getSets(getContext(), getRepository().getFilterResolver())) {
        header.withSetSpec(set.getSpec());
    }
    if (item.isDeleted()) {
        header.withStatus(Header.Status.DELETED);
    }

    xrecord.withHeader(header);
    xrecord.withMetadata(item.getMetadata());
    return xrecord;
}
Use of edu.harvard.iq.dataverse.Dataset in the IQSS dataverse project: class IngestServiceBean, method addFilesToDataset.
/**
 * Attaches a list of brand-new DataFiles to their parent Dataset: each file is
 * added to the dataset's file list, and its FileMetadata is linked to the
 * supplied DatasetVersion.
 *
 * @param version  the dataset version the new files' metadata is attached to
 * @param newFiles the new files; each is expected to carry exactly one FileMetadata
 */
public void addFilesToDataset(DatasetVersion version, List<DataFile> newFiles) {
    if (newFiles == null || newFiles.isEmpty()) {
        return;
    }
    Dataset dataset = version.getDataset();
    for (DataFile dataFile : newFiles) {
        // These are all brand new files, so each should have exactly
        // one FileMetadata. -- L.A.
        // (Reuse this instance below instead of re-fetching via
        // getFileMetadata(); the previously computed fileName local was unused.)
        FileMetadata fileMetadata = dataFile.getFileMetadatas().get(0);
        // Attach the file to the dataset and to the version:
        dataFile.setOwner(dataset);
        version.getFileMetadatas().add(fileMetadata);
        fileMetadata.setDatasetVersion(version);
        dataset.getFiles().add(dataFile);
    }
}
Use of edu.harvard.iq.dataverse.Dataset in the IQSS dataverse project: class XitemRepository, method getItem.
/**
 * Looks up a harvested item by its persistent identifier. A dataset can appear
 * in multiple OAI sets, so several OAIRecords may share the same global id: the
 * first record that resolves to an existing Dataset seeds the Xitem, and the
 * set names of the remaining records are folded into its set list.
 *
 * @param identifier the item's persistent (global) identifier
 * @return the Xitem wrapping the dataset and its set memberships
 * @throws IdDoesNotExistException if no record resolves to an existing dataset
 */
@Override
public Item getItem(String identifier) throws IdDoesNotExistException, OAIException {
    logger.fine("getItem; calling findOaiRecordsByGlobalId, identifier " + identifier);
    List<OAIRecord> oaiRecords = recordService.findOaiRecordsByGlobalId(identifier);
    if (oaiRecords == null || oaiRecords.isEmpty()) {
        throw new IdDoesNotExistException();
    }
    Xitem xoaiItem = null;
    for (OAIRecord record : oaiRecords) {
        if (xoaiItem == null) {
            // Keep trying to resolve the dataset until one lookup succeeds.
            Dataset dataset = datasetService.findByGlobalId(record.getGlobalId());
            if (dataset != null) {
                xoaiItem = new Xitem(record).withDataset(dataset);
            }
            continue;
        }
        // The dataset is part of multiple sets; record the extra membership.
        String setName = record.getSetName();
        if (!StringUtil.isEmpty(setName)) {
            xoaiItem.getSets().add(new Set(setName));
        }
    }
    if (xoaiItem == null) {
        throw new IdDoesNotExistException();
    }
    return xoaiItem;
}
Use of edu.harvard.iq.dataverse.Dataset in the IQSS dataverse project: class XitemRepository, method getItems.
/**
 * Returns one page of harvestable items for the given set and date range.
 *
 * @param filters scoped filters (unused by this implementation)
 * @param offset  zero-based index of the first record to return
 * @param length  maximum number of records to return
 * @param setSpec the OAI set name to list records for
 * @param from    lower bound on record datestamp (may be null)
 * @param until   upper bound on record datestamp (may be null)
 * @return the page of items plus a flag indicating whether more records remain
 */
@Override
public ListItemsResults getItems(List<ScopedFilter> filters, int offset, int length, String setSpec, Date from, Date until) throws OAIException {
    logger.fine("calling getItems; offset=" + offset + ", length=" + length + ", setSpec=" + setSpec + ", from=" + from + ", until=" + until);
    List<OAIRecord> oaiRecords = recordService.findOaiRecordsBySetName(setSpec, from, until);
    List<Item> xoaiItems = new ArrayList<>();
    // BUG FIX: the size used to be logged *before* the null check, which would
    // throw an NPE whenever the record service returned null.
    if (oaiRecords == null || oaiRecords.isEmpty()) {
        return new ListItemsResults(false, xoaiItems);
    }
    logger.fine("total " + oaiRecords.size() + " returned");
    // Only wrap records whose dataset still resolves; others are skipped.
    for (int i = offset; i < offset + length && i < oaiRecords.size(); i++) {
        OAIRecord oaiRecord = oaiRecords.get(i);
        Dataset dataset = datasetService.findByGlobalId(oaiRecord.getGlobalId());
        if (dataset != null) {
            Xitem xItem = new Xitem(oaiRecord).withDataset(dataset);
            xoaiItems.add(xItem);
        }
    }
    if (!StringUtil.isEmpty(setSpec)) {
        addExtraSets(xoaiItems, setSpec, from, until);
    }
    boolean hasMore = offset + length < oaiRecords.size();
    ListItemsResults result = new ListItemsResults(hasMore, xoaiItems);
    logger.fine("returning result with " + xoaiItems.size() + " items.");
    return result;
}
Use of edu.harvard.iq.dataverse.Dataset in the IQSS dataverse project: class IngestMessageBean, method onMessage.
/**
 * JMS listener entry point for asynchronous ingest jobs. Unpacks an
 * IngestMessage, runs tabular ingest on each listed DataFile id, records a
 * failure IngestReport on the file when ingest throws, and finally removes the
 * dataset's Ingest lock (all file ids in one message are assumed to belong to
 * the same dataset — see the comment before the lock removal below).
 *
 * Runs outside any container transaction (NOT_SUPPORTED) so each ingest can
 * manage its own persistence work.
 */
@TransactionAttribute(TransactionAttributeType.NOT_SUPPORTED)
public void onMessage(Message message) {
IngestMessage ingestMessage = null;
// Tracks the id of the file currently being ingested; after the loop it holds
// the last processed id, which is used to locate the dataset for lock removal.
Long datafile_id = null;
try {
ObjectMessage om = (ObjectMessage) message;
ingestMessage = (IngestMessage) om.getObject();
Iterator iter = ingestMessage.getFileIds().iterator();
datafile_id = null;
// -- L.A. Aug. 13 2014
while (iter.hasNext()) {
datafile_id = (Long) iter.next();
logger.fine("Start ingest job;");
try {
// ingestAsTabular returns false (rather than throwing) for a "normal"
// ingest failure; only the boolean is checked here.
if (ingestService.ingestAsTabular(datafile_id)) {
// Thread.sleep(10000);
logger.fine("Finished ingest job;");
} else {
logger.warning("Error occurred during ingest job!");
}
} catch (Exception ex) {
// ex.printStackTrace();
// TODO:
// this solution is working - but it would be cleaner to instead
// make sure that all the exceptions are interrupted and appropriate
// action taken still on the ingest service side.
// -- L.A. Aug. 13 2014;
logger.info("Unknown exception occurred during ingest (supressed stack trace); re-setting ingest status.");
if (datafile_id != null) {
logger.fine("looking up datafile for id " + datafile_id);
DataFile datafile = datafileService.find(datafile_id);
if (datafile != null) {
// Mark the file as failed and attach a failure report so the UI can
// show what happened; any partially-created data tables are discarded.
datafile.SetIngestProblem();
IngestReport errorReport = new IngestReport();
errorReport.setFailure();
// NOTE(review): this message says "Ingest succeeded, but failed to
// save" although the exception may have occurred at any point during
// ingestAsTabular — confirm whether the wording is intentional.
if (ex.getMessage() != null) {
errorReport.setReport("Ingest succeeded, but failed to save the ingested tabular data in the database: " + ex.getMessage());
} else {
errorReport.setReport("Ingest succeeded, but failed to save the ingested tabular data in the database; no further information is available");
}
errorReport.setDataFile(datafile);
datafile.setIngestReport(errorReport);
datafile.setDataTables(null);
logger.info("trying to save datafile and the failed ingest report, id=" + datafile_id);
datafile = datafileService.save(datafile);
// Notify the uploader that ingest failed for this dataset.
Dataset dataset = datafile.getOwner();
if (dataset != null && dataset.getId() != null) {
// logger.info("attempting to remove dataset lock for dataset " + dataset.getId());
// datasetService.removeDatasetLock(dataset.getId());
ingestService.sendFailNotification(dataset.getId());
}
}
}
}
}
// All done ingesting; release the Ingest lock on the parent dataset.
// (This uses the last processed file id to find the dataset — works because
// all the file ids
// packed into this IngestMessage belong to the same dataset)
if (datafile_id != null) {
DataFile datafile = datafileService.find(datafile_id);
if (datafile != null) {
Dataset dataset = datafile.getOwner();
if (dataset != null && dataset.getId() != null) {
datasetService.removeDatasetLocks(dataset.getId(), DatasetLock.Reason.Ingest);
}
}
}
} catch (JMSException ex) {
// error in getting object from message; can't send e-mail
ex.printStackTrace();
} finally {
// when we're done, go ahead and remove the lock (not yet)
// NOTE(review): this try block is intentionally empty — the lock removal it
// was reserved for happens above instead; consider deleting it.
try {
// datasetService.removeDatasetLock( ingestMessage.getDatasetId() );
} catch (Exception ex) {
// application was unable to remove the datasetLock
ex.printStackTrace();
}
}
}
Aggregations