Use of edu.harvard.iq.dataverse.DataFile in the IQSS/dataverse project:
class DdiExportUtil, method createDataDscr.
// Methods specific to the tabular data ("<dataDscr>") section.
// Note that these do NOT operate on DTO objects, but instead directly
// on Dataverse DataVariable, DataTable, etc. objects.
// This is because for this release (4.5) we are recycling the already available
// code, and this is what we got. (We already have DTO objects for DataTable,
// and DataVariable, etc., but the current version JsonPrinter.jsonAsDatasetDto()
// does not produce JSON for these objects - it stops at DataFile. Eventually
// we want all of our objects to be exportable as JSON, and then all the exports
// can go through the same DTO state... But we don't have time for it now;
// plus, the structure of file-level metadata is currently being re-designed,
// so we probably should not invest any time into it right now). -- L.A. 4.5
private static void createDataDscr(XMLStreamWriter xmlw, DatasetVersion datasetVersion) throws XMLStreamException {
    List<FileMetadata> fileMetadatas = datasetVersion.getFileMetadatas();
    if (fileMetadatas == null || fileMetadatas.isEmpty()) {
        return;
    }
    // The <dataDscr> element is opened lazily, on the first tabular file
    // encountered, so that versions with no tabular data emit nothing at all.
    boolean dataDscrOpen = false;
    for (FileMetadata fileMetadata : fileMetadatas) {
        DataFile dataFile = fileMetadata.getDataFile();
        if (dataFile == null || !dataFile.isTabularData()) {
            continue;
        }
        if (!dataDscrOpen) {
            xmlw.writeStartElement("dataDscr");
            dataDscrOpen = true;
        }
        // One <var> entry per data variable of this tabular file:
        for (DataVariable dataVariable : dataFile.getDataTable().getDataVariables()) {
            createVarDDI(xmlw, dataVariable);
        }
    }
    if (dataDscrOpen) {
        xmlw.writeEndElement(); // dataDscr
    }
}
Use of edu.harvard.iq.dataverse.DataFile in the IQSS/dataverse project:
class DeleteHarvestingClientCommand, method executeImpl.
/**
 * Deletes the harvesting client together with all of its harvested content:
 * every DataFile harvested by this client is removed first, then the client
 * entity itself.
 *
 * @param ctxt the command context providing the entity manager and services
 * @throws CommandException if the command was constructed with a null client
 */
@Override
public void executeImpl(CommandContext ctxt) throws CommandException {
    if (harvestingClient == null) {
        throw new IllegalCommandException("DeleteHarvestingClientCommand: attempted to execute with null harvesting client; dataverse: " + motherDataverse.getAlias(), this);
    }
    // Re-attach the (possibly detached) client to the persistence context:
    HarvestingClient merged = ctxt.em().merge(harvestingClient);
    // Remove all the files harvested by this client before removing the
    // client itself. Each file must be merged into the current persistence
    // context before it can be removed.
    // (The original code also assigned null to the loop variable after the
    // remove - a no-op on an enhanced-for variable, dropped here.)
    for (DataFile harvestedFile : ctxt.files().findHarvestedFilesByClient(merged)) {
        ctxt.em().remove(ctxt.em().merge(harvestedFile));
    }
    ctxt.em().remove(merged);
}
Use of edu.harvard.iq.dataverse.DataFile in the IQSS/dataverse project:
class HarvestingClientServiceBean, method deleteClient.
// Deleting a client, with all the associated content, can take a while -
// hence it's an async action:
// TOFIGUREOUT:
// for whatever reason I cannot call the DeleteHarvestingClientCommand from
// inside this method; something to do with it being asynchronous?
@Asynchronous
public void deleteClient(Long clientId) {
    String errorMessage = null;
    HarvestingClient victim = find(clientId);
    if (victim == null) {
        // nothing to do - the client is already gone (or never existed):
        return;
    }
    try {
        // engineService.submit(new DeleteHarvestingClientCommand(dvRequestService.getDataverseRequest(), victim));
        HarvestingClient merged = em.merge(victim);
        // if this was a scheduled harvester, make sure the timer is deleted:
        dataverseTimerService.removeHarvestTimer(victim);
        // purge indexed objects:
        indexService.deleteHarvestedDocuments(victim);
        // proceed removing the client itself; each harvested file must be
        // merged into the current persistence context before removal:
        for (DataFile harvestedFile : dataFileService.findHarvestedFilesByClient(merged)) {
            em.remove(em.merge(harvestedFile));
        }
        em.remove(merged);
    } catch (Exception e) {
        // Use the exception itself, not just getMessage() - which can be
        // null for some exception types, losing all diagnostic information:
        errorMessage = "Failed to delete client. Unknown exception: " + e;
    }
    if (errorMessage != null) {
        logger.warning(errorMessage);
    }
}
Use of edu.harvard.iq.dataverse.DataFile in the IQSS/dataverse project:
class DDIExportServiceBean, method export.
/*
 * Workhorse methods, that do all the work:
 */
private void export(String objectTag, Long objectId, OutputStream os, String partialExclude, String partialInclude) {
    /*
     * Some checks will need to be here, to see if the corresponding dataset
     * is released, if all the permissions are satisfied, etc., with
     * appropriate exceptions thrown otherwise.
     *
     * something like
     throw new IllegalArgumentException("ExportStudy called with a null study.");
     throw new IllegalArgumentException("Study does not have released version, study.id = " + s.getId());
     */
    // Both field filters are optional; null means "no filtering":
    Set<String> excludedFieldSet = parseFieldNames(partialExclude);
    Set<String> includedFieldSet = parseFieldNames(partialInclude);
    DatasetVersion releasedVersion = null;
    // Create XML Stream Writer, using the supplied OutputStream:
    XMLStreamWriter xmlw = null;
    // Try to resolve the supplied object id:
    Object dataObject = null;
    if (OBJECT_TAG_VARIABLE.equals(objectTag)) {
        dataObject = variableService.find(objectId);
        if (dataObject == null) {
            throw new IllegalArgumentException("Metadata Export: Invalid variable id supplied.");
        }
    } else if (OBJECT_TAG_DATAFILE.equals(objectTag)) {
        dataObject = fileService.find(objectId);
        if (dataObject == null) {
            throw new IllegalArgumentException("Metadata Export: Invalid datafile id supplied.");
        }
    } else if (OBJECT_TAG_DATASET.equals(objectTag)) {
        dataObject = datasetService.find(objectId);
        if (dataObject == null) {
            throw new IllegalArgumentException("Metadata Export: Invalid dataset id supplied.");
        }
        // Only the released version of a dataset may be exported:
        releasedVersion = ((Dataset) dataObject).getReleasedVersion();
        if (releasedVersion == null) {
            throw new IllegalArgumentException("Metadata Export: Dataset not released.");
        }
    } else {
        throw new IllegalArgumentException("Metadata Export: Unsupported export requested.");
    }
    try {
        xmlw = xmlOutputFactory.createXMLStreamWriter(os);
        xmlw.writeStartDocument();
        if (OBJECT_TAG_VARIABLE.equals(objectTag)) {
            createVarDDI(xmlw, excludedFieldSet, includedFieldSet, (DataVariable) dataObject);
        } else if (OBJECT_TAG_DATAFILE.equals(objectTag)) {
            createDataFileDDI(xmlw, excludedFieldSet, includedFieldSet, (DataFile) dataObject);
        } else if (OBJECT_TAG_DATASET.equals(objectTag)) {
            createDatasetDDI(xmlw, excludedFieldSet, includedFieldSet, releasedVersion);
        }
        xmlw.writeEndDocument();
    } catch (XMLStreamException ex) {
        Logger.getLogger("global").log(Level.SEVERE, null, ex);
        throw new EJBException("ERROR occurred during partial export of a study.", ex);
    } finally {
        try {
            if (xmlw != null) {
                xmlw.close();
            }
        } catch (XMLStreamException ex) {
            // A close failure at this point is non-fatal (the document has
            // already been written, or we are unwinding from an earlier
            // exception), but it should not be silently swallowed:
            Logger.getLogger("global").log(Level.WARNING, "Failed to close XML stream writer.", ex);
        }
    }
}

// Parses a comma-separated list of field names.
// Returns null when the argument is null or empty - downstream code treats
// a null set as "no filtering", which must be preserved.
private static Set<String> parseFieldNames(String fieldList) {
    if (fieldList == null || fieldList.isEmpty()) {
        return null;
    }
    Set<String> fields = new HashSet<String>();
    for (String token : fieldList.split(",")) {
        // split() can produce empty tokens (e.g. "a,,b"); skip those:
        if (!token.isEmpty()) {
            fields.add(token);
        }
    }
    return fields;
}
Use of edu.harvard.iq.dataverse.DataFile in the IQSS/dataverse project:
class DdiExportUtil, method createFileDscr.
// Writes one <fileDscr> section per tabular data file in the given dataset
// version: file name, dimensions, content type, UNF signature and tags.
// Non-tabular files are skipped entirely.
private static void createFileDscr(XMLStreamWriter xmlw, DatasetVersion datasetVersion) throws XMLStreamException {
    String dataverseUrl = getDataverseSiteUrl();
    for (FileMetadata fileMetadata : datasetVersion.getFileMetadatas()) {
        DataFile dataFile = fileMetadata.getDataFile();
        // Only tabular files get a <fileDscr> entry:
        if (dataFile == null || !dataFile.isTabularData()) {
            continue;
        }
        DataTable dataTable = dataFile.getDataTable();
        xmlw.writeStartElement("fileDscr");
        writeAttribute(xmlw, "ID", "f" + dataFile.getId());
        writeAttribute(xmlw, "URI", dataverseUrl + "/api/access/datafile/" + dataFile.getId());
        xmlw.writeStartElement("fileTxt");
        xmlw.writeStartElement("fileName");
        xmlw.writeCharacters(fileMetadata.getLabel());
        xmlw.writeEndElement(); // fileName
        // <dimensns> is emitted only when at least one dimension is known:
        boolean hasDimensions = dataTable.getCaseQuantity() != null
                || dataTable.getVarQuantity() != null
                || dataTable.getRecordsPerCase() != null;
        if (hasDimensions) {
            xmlw.writeStartElement("dimensns");
            if (dataTable.getCaseQuantity() != null) {
                xmlw.writeStartElement("caseQnty");
                xmlw.writeCharacters(dataTable.getCaseQuantity().toString());
                xmlw.writeEndElement(); // caseQnty
            }
            if (dataTable.getVarQuantity() != null) {
                xmlw.writeStartElement("varQnty");
                xmlw.writeCharacters(dataTable.getVarQuantity().toString());
                xmlw.writeEndElement(); // varQnty
            }
            if (dataTable.getRecordsPerCase() != null) {
                xmlw.writeStartElement("recPrCas");
                xmlw.writeCharacters(dataTable.getRecordsPerCase().toString());
                xmlw.writeEndElement(); // recPrCas
            }
            xmlw.writeEndElement(); // dimensns
        }
        xmlw.writeStartElement("fileType");
        xmlw.writeCharacters(dataFile.getContentType());
        xmlw.writeEndElement(); // fileType
        xmlw.writeEndElement(); // fileTxt
        // UNF (Universal Numeric Fingerprint) signature, as a <notes> entry:
        String unf = dataTable.getUnf();
        if (unf != null && !unf.isEmpty()) {
            xmlw.writeStartElement("notes");
            writeAttribute(xmlw, "level", LEVEL_FILE);
            writeAttribute(xmlw, "type", NOTE_TYPE_UNF);
            writeAttribute(xmlw, "subject", NOTE_SUBJECT_UNF);
            xmlw.writeCharacters(unf);
            xmlw.writeEndElement(); // notes
        }
        // One <notes> entry per file tag:
        if (dataFile.getTags() != null) {
            for (int tagIndex = 0; tagIndex < dataFile.getTags().size(); tagIndex++) {
                xmlw.writeStartElement("notes");
                writeAttribute(xmlw, "level", LEVEL_FILE);
                writeAttribute(xmlw, "type", NOTE_TYPE_TAG);
                writeAttribute(xmlw, "subject", NOTE_SUBJECT_TAG);
                xmlw.writeCharacters(dataFile.getTags().get(tagIndex).getTypeLabel());
                xmlw.writeEndElement(); // notes
            }
        }
        // TODO: add the remaining fileDscr elements!
        xmlw.writeEndElement(); // fileDscr
    }
}
Aggregations