Use of edu.harvard.iq.dataverse.FileMetadata in project dataverse by IQSS.
Class DdiExportUtil, method createOtherMatsFromFileMetadatas.
// An alternative version of the createOtherMats method - this one is used
// when a "full" DDI is being cooked; just like the fileDscr and data/var sections methods,
// it operates on the list of FileMetadata entities, not on File DTOs. This is because
// DTOs do not support "tabular", variable-level metadata yet. And we need to be able to
// tell if this file is in fact tabular data - so that we know if it needs an
// otherMat, or a fileDscr section.
// -- L.A. 4.5
private static void createOtherMatsFromFileMetadatas(XMLStreamWriter xmlw, List<FileMetadata> fileMetadatas) throws XMLStreamException {
    // The preferred URL for this dataverse, for cooking up the file access API links:
    String dataverseUrl = getDataverseSiteUrl();
    for (FileMetadata fileMetadata : fileMetadatas) {
        // Non-tabular files get an otherMat section; tabular files are described
        // in fileDscr sections instead (with the numbers of variables
        // and observations, etc.)
        if (fileMetadata.getDataFile() != null && !fileMetadata.getDataFile().isTabularData()) {
            xmlw.writeStartElement("otherMat");
            writeAttribute(xmlw, "ID", "f" + fileMetadata.getDataFile().getId());
            writeAttribute(xmlw, "URI", dataverseUrl + "/api/access/datafile/" + fileMetadata.getDataFile().getId());
            writeAttribute(xmlw, "level", "datafile");
            xmlw.writeStartElement("labl");
            xmlw.writeCharacters(fileMetadata.getLabel());
            xmlw.writeEndElement(); // labl
            String description = fileMetadata.getDescription();
            if (description != null) {
                xmlw.writeStartElement("txt");
                xmlw.writeCharacters(description);
                xmlw.writeEndElement(); // txt
            }
            // There's no readily available field in the otherMat section
            // for the content type (aka mime type); so we'll store it in this
            // specially formatted notes section:
            String contentType = fileMetadata.getDataFile().getContentType();
            if (!StringUtil.isEmpty(contentType)) {
                xmlw.writeStartElement("notes");
                writeAttribute(xmlw, "level", LEVEL_FILE);
                writeAttribute(xmlw, "type", NOTE_TYPE_CONTENTTYPE);
                writeAttribute(xmlw, "subject", NOTE_SUBJECT_CONTENTTYPE);
                xmlw.writeCharacters(contentType);
                xmlw.writeEndElement(); // notes
            }
            xmlw.writeEndElement(); // otherMat
        }
    }
}
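For reference, here is a minimal, self-contained sketch of the element shape the method above produces, written with the standard javax.xml.stream API against a StringWriter. The file id, URL, and label are placeholder values, and it calls XMLStreamWriter.writeAttribute directly instead of the class's private writeAttribute helper.

import java.io.StringWriter;
import javax.xml.stream.XMLOutputFactory;
import javax.xml.stream.XMLStreamWriter;

public class OtherMatSketch {
    public static void main(String[] args) throws Exception {
        StringWriter out = new StringWriter();
        XMLStreamWriter xmlw = XMLOutputFactory.newInstance().createXMLStreamWriter(out);

        // Same structure as createOtherMatsFromFileMetadatas above; values are made up.
        xmlw.writeStartElement("otherMat");
        xmlw.writeAttribute("ID", "f42");
        xmlw.writeAttribute("URI", "https://demo.dataverse.org/api/access/datafile/42");
        xmlw.writeAttribute("level", "datafile");
        xmlw.writeStartElement("labl");
        xmlw.writeCharacters("codebook.pdf");
        xmlw.writeEndElement(); // labl
        xmlw.writeEndElement(); // otherMat

        xmlw.flush();
        // Prints: <otherMat ID="f42" URI="https://demo.dataverse.org/api/access/datafile/42"
        //          level="datafile"><labl>codebook.pdf</labl></otherMat>
        System.out.println(out);
    }
}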
Use of edu.harvard.iq.dataverse.FileMetadata in project dataverse by IQSS.
Class DdiExportUtil, method createDataDscr.
// Methods specific to the tabular data ("<dataDscr>") section.
// Note that these do NOT operate on DTO objects, but instead directly
// on Dataverse DataVariable, DataTable, etc. objects.
// This is because for this release (4.5) we are recycling the already available
// code, and this is what we got. (We already have DTO objects for DataTable,
// and DataVariable, etc., but the current version JsonPrinter.jsonAsDatasetDto()
// does not produce JSON for these objects - it stops at DataFile. Eventually
// we want all of our objects to be exportable as JSON, and then all the exports
// can go through the same DTO state... But we don't have time for it now;
// plus, the structure of file-level metadata is currently being re-designed,
// so we probably should not invest any time into it right now). -- L.A. 4.5
private static void createDataDscr(XMLStreamWriter xmlw, DatasetVersion datasetVersion) throws XMLStreamException {
    if (datasetVersion.getFileMetadatas() == null || datasetVersion.getFileMetadatas().isEmpty()) {
        return;
    }
    boolean tabularData = false;
    // The opening <dataDscr> tag is only written once we encounter the first
    // tabular datafile.
    for (FileMetadata fileMetadata : datasetVersion.getFileMetadatas()) {
        DataFile dataFile = fileMetadata.getDataFile();
        if (dataFile != null && dataFile.isTabularData()) {
            if (!tabularData) {
                xmlw.writeStartElement("dataDscr");
                tabularData = true;
            }
            List<DataVariable> vars = dataFile.getDataTable().getDataVariables();
            for (DataVariable var : vars) {
                createVarDDI(xmlw, var);
            }
        }
    }
    if (tabularData) {
        xmlw.writeEndElement(); // dataDscr
    }
}
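The pattern above delays writing the opening <dataDscr> tag until the first tabular file is actually found, so a dataset with no tabular data produces no empty section. Here is a minimal, self-contained sketch of that lazy-opening pattern; the FileInfo class and the sample files are stand-ins for illustration, not Dataverse classes.

import java.io.StringWriter;
import java.util.Arrays;
import java.util.List;
import javax.xml.stream.XMLOutputFactory;
import javax.xml.stream.XMLStreamWriter;

public class LazyDataDscrSketch {
    // Illustrative stand-in for FileMetadata/DataFile.
    static class FileInfo {
        final String name;
        final boolean tabular;
        FileInfo(String name, boolean tabular) { this.name = name; this.tabular = tabular; }
    }

    public static void main(String[] args) throws Exception {
        List<FileInfo> files = Arrays.asList(new FileInfo("readme.txt", false),
                                             new FileInfo("survey.tab", true));
        StringWriter out = new StringWriter();
        XMLStreamWriter xmlw = XMLOutputFactory.newInstance().createXMLStreamWriter(out);

        boolean opened = false;
        for (FileInfo f : files) {
            if (!f.tabular) {
                continue;
            }
            if (!opened) {
                // open <dataDscr> only when the first tabular file shows up
                xmlw.writeStartElement("dataDscr");
                opened = true;
            }
            // createVarDDI() would write the real variable metadata here
            xmlw.writeStartElement("var");
            xmlw.writeAttribute("name", f.name);
            xmlw.writeEndElement(); // var
        }
        if (opened) {
            xmlw.writeEndElement(); // dataDscr
        }
        xmlw.flush();
        System.out.println(out); // <dataDscr><var name="survey.tab"></var></dataDscr>
    }
}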
Use of edu.harvard.iq.dataverse.FileMetadata in project dataverse by IQSS.
Class RestrictFileCommand, method executeImpl.
@Override
protected void executeImpl(CommandContext ctxt) throws CommandException {
    // check if this is a public installation; if so, don't allow restricting files
    boolean defaultValue = false;
    boolean publicInstall = ctxt.settings().isTrueForKey(SettingsServiceBean.Key.PublicInstall, defaultValue);
    if (publicInstall) {
        throw new CommandExecutionException("Restricting files is not permitted on a public installation.", this);
    }
    if (file.getOwner() == null) {
        // this is a new file coming through upload; just set the restriction flag
        file.getFileMetadata().setRestricted(restrict);
        file.setRestricted(restrict);
    } else {
        Dataset dataset = file.getOwner();
        DatasetVersion workingVersion = dataset.getEditVersion();
        // check if this file is already restricted or already unrestricted
        if ((restrict && file.getFileMetadata().isRestricted()) || (!restrict && !file.getFileMetadata().isRestricted())) {
            String text = restrict ? "restricted" : "unrestricted";
            throw new CommandExecutionException("File " + file.getDisplayName() + " is already " + text, this);
        }
        // check that this dataset is a draft (it should be); only then can we update the restriction
        if (workingVersion.isDraft()) {
            // if the dataset has already been released, locate this file's metadata
            // in the working (draft) version and update that copy
            if (dataset.isReleased()) {
                for (FileMetadata fmw : workingVersion.getFileMetadatas()) {
                    if (file.equals(fmw.getDataFile())) {
                        fmw.setRestricted(restrict);
                        if (!file.isReleased()) {
                            file.setRestricted(restrict);
                        }
                    }
                }
            } else {
                file.getFileMetadata().setRestricted(restrict);
                if (!file.isReleased()) {
                    file.setRestricted(restrict);
                }
                if (file.getFileMetadata().isRestricted() != restrict) {
                    throw new CommandExecutionException("Failed to update the file metadata", this);
                }
            }
        } else {
            throw new CommandExecutionException("Working version must be a draft", this);
        }
    }
}
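The method enforces its preconditions in a fixed order: restriction is refused on a public installation, a brand-new upload is flagged directly, a request that matches the current state is rejected, and only a draft working version may be edited. Here is a minimal, self-contained sketch that distills that decision order into plain Java; the Outcome enum and the decide() helper are stand-ins for illustration, not part of Dataverse.

public class RestrictDecisionSketch {
    // Possible outcomes of a restrict/unrestrict request, mirroring the checks above.
    enum Outcome { REJECT_PUBLIC_INSTALL, APPLY, REJECT_ALREADY_IN_STATE, REJECT_NOT_DRAFT }

    static Outcome decide(boolean publicInstall, boolean newUpload, boolean currentlyRestricted,
                          boolean requestedRestrict, boolean draftWorkingVersion) {
        if (publicInstall) return Outcome.REJECT_PUBLIC_INSTALL;         // restricting is disabled site-wide
        if (newUpload) return Outcome.APPLY;                             // file has no owner yet: just set the flag
        if (currentlyRestricted == requestedRestrict) return Outcome.REJECT_ALREADY_IN_STATE;
        if (!draftWorkingVersion) return Outcome.REJECT_NOT_DRAFT;       // only a draft version may be edited
        return Outcome.APPLY;                                            // update FileMetadata (and DataFile if unreleased)
    }

    public static void main(String[] args) {
        System.out.println(decide(false, false, false, true, true)); // APPLY
        System.out.println(decide(false, false, true, true, true));  // REJECT_ALREADY_IN_STATE
        System.out.println(decide(true, false, false, true, true));  // REJECT_PUBLIC_INSTALL
    }
}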
Use of edu.harvard.iq.dataverse.FileMetadata in project dataverse by IQSS.
Class DDIExportServiceBean, method createDatasetDDI.
private void createDatasetDDI(XMLStreamWriter xmlw, Set<String> excludedFieldSet, Set<String> includedFieldSet, DatasetVersion version) throws XMLStreamException {
    xmlw.writeStartElement("codeBook");
    xmlw.writeDefaultNamespace("http://www.icpsr.umich.edu/DDI");
    writeAttribute(xmlw, "version", "2.0");
    createStdyDscr(xmlw, excludedFieldSet, includedFieldSet, version);
    // Files:
    List<FileMetadata> tabularDataFiles = new ArrayList<>();
    List<FileMetadata> otherDataFiles = new ArrayList<>();
    List<FileMetadata> fileMetadatas = version.getFileMetadatas();
    if (fileMetadatas == null || fileMetadatas.isEmpty()) {
        xmlw.writeEndElement(); // codeBook
        return;
    }
    for (FileMetadata fileMetadata : fileMetadatas) {
        if (fileMetadata.getDataFile().isTabularData()) {
            tabularDataFiles.add(fileMetadata);
        } else {
            otherDataFiles.add(fileMetadata);
        }
    }
    if (checkField("fileDscr", excludedFieldSet, includedFieldSet)) {
        for (FileMetadata fileMetadata : tabularDataFiles) {
            DataTable dt = fileService.findDataTableByFileId(fileMetadata.getDataFile().getId());
            createFileDscr(xmlw, excludedFieldSet, includedFieldSet, fileMetadata.getDataFile(), dt);
        }
        // 2nd pass, to create data (variable) description sections:
        xmlw.writeStartElement("dataDscr");
        for (FileMetadata fileMetadata : tabularDataFiles) {
            DataTable dt = fileService.findDataTableByFileId(fileMetadata.getDataFile().getId());
            List<DataVariable> vars = variableService.findByDataTableId(dt.getId());
            for (DataVariable var : vars) {
                createVarDDI(xmlw, excludedFieldSet, null, var);
            }
        }
        xmlw.writeEndElement(); // dataDscr
    }
    if (checkField("othrMat", excludedFieldSet, includedFieldSet)) {
        for (FileMetadata fileMetadata : otherDataFiles) {
            createOtherMat(xmlw, excludedFieldSet, includedFieldSet, fileMetadata);
        }
    }
    xmlw.writeEndElement(); // codeBook
}
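The fileDscr/dataDscr/othrMat blocks above are gated by checkField, which filters sections against the caller's include/exclude sets. The real method is internal to DDIExportServiceBean and is not shown here; the sketch below only illustrates the assumed semantics (an explicit exclusion wins, a non-empty include set acts as a whitelist, otherwise everything passes) and is not the actual implementation.

import java.util.Collections;
import java.util.Set;

public class FieldFilterSketch {
    // Assumed behavior of DDIExportServiceBean.checkField(); a sketch, not the real code.
    static boolean checkField(String field, Set<String> excluded, Set<String> included) {
        if (excluded != null && excluded.contains(field)) {
            return false;                      // explicit exclusion wins
        }
        if (included != null && !included.isEmpty()) {
            return included.contains(field);   // whitelist mode
        }
        return true;                           // no filters: include everything
    }

    public static void main(String[] args) {
        System.out.println(checkField("fileDscr", Collections.singleton("othrMat"), null)); // true
        System.out.println(checkField("othrMat", Collections.singleton("othrMat"), null));  // false
    }
}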
Use of edu.harvard.iq.dataverse.FileMetadata in project dataverse by IQSS.
Class DdiExportUtil, method createFileDscr.
private static void createFileDscr(XMLStreamWriter xmlw, DatasetVersion datasetVersion) throws XMLStreamException {
    String dataverseUrl = getDataverseSiteUrl();
    for (FileMetadata fileMetadata : datasetVersion.getFileMetadatas()) {
        DataFile dataFile = fileMetadata.getDataFile();
        if (dataFile != null && dataFile.isTabularData()) {
            DataTable dt = dataFile.getDataTable();
            xmlw.writeStartElement("fileDscr");
            writeAttribute(xmlw, "ID", "f" + dataFile.getId());
            writeAttribute(xmlw, "URI", dataverseUrl + "/api/access/datafile/" + dataFile.getId());
            xmlw.writeStartElement("fileTxt");
            xmlw.writeStartElement("fileName");
            xmlw.writeCharacters(fileMetadata.getLabel());
            xmlw.writeEndElement(); // fileName
            if (dt.getCaseQuantity() != null || dt.getVarQuantity() != null || dt.getRecordsPerCase() != null) {
                xmlw.writeStartElement("dimensns");
                if (dt.getCaseQuantity() != null) {
                    xmlw.writeStartElement("caseQnty");
                    xmlw.writeCharacters(dt.getCaseQuantity().toString());
                    xmlw.writeEndElement(); // caseQnty
                }
                if (dt.getVarQuantity() != null) {
                    xmlw.writeStartElement("varQnty");
                    xmlw.writeCharacters(dt.getVarQuantity().toString());
                    xmlw.writeEndElement(); // varQnty
                }
                if (dt.getRecordsPerCase() != null) {
                    xmlw.writeStartElement("recPrCas");
                    xmlw.writeCharacters(dt.getRecordsPerCase().toString());
                    xmlw.writeEndElement(); // recPrCas
                }
                xmlw.writeEndElement(); // dimensns
            }
            xmlw.writeStartElement("fileType");
            xmlw.writeCharacters(dataFile.getContentType());
            xmlw.writeEndElement(); // fileType
            xmlw.writeEndElement(); // fileTxt
            // a specially formatted notes element is used to store the UNF
            // (Universal Numeric Fingerprint) signature:
            if (dt.getUnf() != null && !dt.getUnf().equals("")) {
                xmlw.writeStartElement("notes");
                writeAttribute(xmlw, "level", LEVEL_FILE);
                writeAttribute(xmlw, "type", NOTE_TYPE_UNF);
                writeAttribute(xmlw, "subject", NOTE_SUBJECT_UNF);
                xmlw.writeCharacters(dt.getUnf());
                xmlw.writeEndElement(); // notes
            }
            if (dataFile.getTags() != null) {
                for (int i = 0; i < dataFile.getTags().size(); i++) {
                    xmlw.writeStartElement("notes");
                    writeAttribute(xmlw, "level", LEVEL_FILE);
                    writeAttribute(xmlw, "type", NOTE_TYPE_TAG);
                    writeAttribute(xmlw, "subject", NOTE_SUBJECT_TAG);
                    xmlw.writeCharacters(dataFile.getTags().get(i).getTypeLabel());
                    xmlw.writeEndElement(); // notes
                }
            }
            // TODO: add the remaining fileDscr elements!
            xmlw.writeEndElement(); // fileDscr
        }
    }
}
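Because the UNF and file tags are stored in generically named notes elements, a consumer has to dispatch on the type attribute to get them back. Here is a minimal, self-contained sketch of reading a UNF note out of such a fragment with the standard javax.xml.stream reader; the literal "VDC:UNF" and "Universal Numeric Fingerprint" strings are assumptions about the NOTE_TYPE_UNF / NOTE_SUBJECT_UNF constants used above, and the fragment itself is hand-written for illustration.

import java.io.StringReader;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamConstants;
import javax.xml.stream.XMLStreamReader;

public class UnfNoteReaderSketch {
    public static void main(String[] args) throws Exception {
        // Tiny hand-written fragment in the shape produced by createFileDscr above;
        // the note type/subject values are assumed, not verified constants.
        String ddi = "<fileDscr ID=\"f42\">"
                + "<notes level=\"file\" type=\"VDC:UNF\" subject=\"Universal Numeric Fingerprint\">"
                + "UNF:6:abc123==</notes>"
                + "</fileDscr>";

        XMLStreamReader r = XMLInputFactory.newInstance().createXMLStreamReader(new StringReader(ddi));
        while (r.hasNext()) {
            if (r.next() == XMLStreamConstants.START_ELEMENT
                    && "notes".equals(r.getLocalName())
                    && "VDC:UNF".equals(r.getAttributeValue(null, "type"))) {
                System.out.println("UNF note: " + r.getElementText()); // UNF note: UNF:6:abc123==
            }
        }
    }
}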