use of org.jbei.ice.lib.dto.entry.EntryFieldLabel in project ice by JBEI.
the class RemoteEntriesAsCSV method writeLocalEntries.
protected void writeLocalEntries(List<Long> entries, List<EntryFieldLabel> fields, CSVWriter writer, ZipOutputStream zos) {
    if (entries == null)
        return;

    SequenceDAO sequenceDAO = DAOFactory.getSequenceDAO();
    Configuration configuration = DAOFactory.getConfigurationDAO().get(ConfigurationKey.URI_PREFIX);
    String thisUrl = configuration == null ? "" : configuration.getValue();

    for (Long id : entries) {
        Entry entry = DAOFactory.getEntryDAO().get(id);
        String[] line = new String[fields.size() + 4];
        line[0] = thisUrl;
        line[1] = entry.getCreationTime().toString();
        line[2] = entry.getPartNumber();

        int i = 2;
        for (EntryFieldLabel field : fields) {
            line[i + 1] = EntryUtil.entryFieldToValue(entry, field);
            i += 1;
        }

        // write sequence to zip file
        long entryId = entry.getId();
        if (sequenceDAO.hasSequence(entryId)) {
            String name = entry.getPartNumber() + ".gb";
            try {
                Sequence sequence = sequenceDAO.getByEntry(entry);
                line[i + 1] = name;
                GenbankFormatter genbankFormatter = new GenbankFormatter(name);
                ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
                genbankFormatter.format(sequence, byteStream);
                ByteArrayInputStream inputStream = new ByteArrayInputStream(byteStream.toByteArray());
                InputStreamWrapper wrapper = new InputStreamWrapper(inputStream, name);
                putZipEntry(wrapper, zos);
            } catch (Exception e) {
                line[i + 1] = "";
            }
        } else {
            line[i + 1] = "";
        }
        writer.writeNext(line);
    }
}
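For reference, each row written above has the shape: registry URL, creation time, part number, one column per selected field, then the GenBank file name (or an empty string when no sequence exists). Below is a minimal sketch, not part of ICE, of a header row matching that layout; it assumes the List and EntryFieldLabel imports already used above, and the label text is a placeholder.

String[] buildHeader(List<EntryFieldLabel> fields) {
    String[] header = new String[fields.size() + 4];
    header[0] = "Registry";                    // line[0]: registry URI prefix
    header[1] = "Created";                     // line[1]: entry creation time
    header[2] = "Part ID";                     // line[2]: part number
    int i = 2;
    for (EntryFieldLabel field : fields) {
        header[i + 1] = String.valueOf(field); // line[3..]: one column per selected field
        i += 1;
    }
    header[i + 1] = "Sequence File";           // last column: GenBank file name, or "" when absent
    return header;
}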
use of org.jbei.ice.lib.dto.entry.EntryFieldLabel in project ice by JBEI.
the class FileResource method downloadCSV.
/**
 * Extracts the CSV information, writes it to the temp directory, and returns the file UUID.
 * The client is then expected to make a second REST call with the UUID in a separate window.
 * This workaround exists because files cannot be downloaded directly via XHR.
 */
@POST
@Path("csv")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response downloadCSV(@QueryParam("sequenceFormats") final List<String> sequenceFormats,
                            @QueryParam("entryFields") final List<String> fields,
                            EntrySelection selection) {
    String userId = super.requireUserId();
    EntriesAsCSV entriesAsCSV = new EntriesAsCSV(userId, sequenceFormats.toArray(new String[0]));
    List<EntryFieldLabel> entryFieldLabels = new ArrayList<>();
    try {
        if (fields != null && !fields.isEmpty()) {
            entryFieldLabels.addAll(fields.stream().map(EntryFieldLabel::fromString).collect(Collectors.toList()));
        }
    } catch (Exception e) {
        Logger.error(e);
    }

    boolean success = entriesAsCSV.setSelectedEntries(selection, entryFieldLabels.toArray(new EntryFieldLabel[0]));
    if (!success)
        return super.respond(false);

    final File file = entriesAsCSV.getFilePath().toFile();
    if (file.exists()) {
        return Response.ok(new Setting("key", file.getName())).build();
    }
    return Response.serverError().build();
}
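A client-side sketch of the two-step flow described in the javadoc, using the standard JAX-RS client API (javax.ws.rs.client, or jakarta.ws.rs.client on newer stacks). The "rest/file/csv" path, the query parameter values, and the Setting accessor are assumptions based on this resource class, not confirmed API.

String requestCsvKey(String baseUri, EntrySelection selection) {
    Client client = ClientBuilder.newClient();
    try {
        Response response = client.target(baseUri)
                .path("rest/file/csv")
                .queryParam("entryFields", "name", "summary")   // placeholder field names
                .request(MediaType.APPLICATION_JSON)
                .post(Entity.json(selection));
        // the body is the Setting created above; its value holds the temporary file name,
        // which the client passes to a follow-up download request in a separate window
        Setting setting = response.readEntity(Setting.class);
        return setting.getValue();   // assumes Setting exposes its stored value
    } finally {
        client.close();
    }
}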
use of org.jbei.ice.lib.dto.entry.EntryFieldLabel in project ice by JBEI.
the class BulkUploadEntries method submitBulkImportDraft.
/**
* Submits a bulk import that has been saved. This action is restricted to the owner of the
* draft or to administrators.
*/
private ProcessedBulkUpload submitBulkImportDraft(String userId, BulkUpload draft,
        ProcessedBulkUpload processedBulkUpload) throws PermissionException {
    // validate entries
    BulkUploadValidation validation = new BulkUploadValidation(draft);
    if (!validation.isValid()) {
        processedBulkUpload.setSuccess(false);
        for (EntryFieldLabel entryFieldLabel : validation.getFailedFields()) {
            processedBulkUpload.getHeaders().add(new EntryHeaderValue(entryFieldLabel));
        }
        processedBulkUpload.setUserMessage("Cannot submit your bulk upload due to a validation failure");
        return processedBulkUpload;
    }

    draft.setStatus(BulkUploadStatus.PENDING_APPROVAL);
    draft.setLastUpdateTime(new Date());
    draft.setName(userId);
    BulkUpload bulkUpload = dao.update(draft);

    if (bulkUpload != null) {
        // convert entries to pending
        dao.setEntryStatus(bulkUpload, Visibility.PENDING);

        String email = Utils.getConfigValue(ConfigurationKey.BULK_UPLOAD_APPROVER_EMAIL);
        if (email != null && !email.isEmpty()) {
            String subject = Utils.getConfigValue(ConfigurationKey.PROJECT_NAME) + " Bulk Upload Notification";
            String body = "A bulk upload has been submitted and is pending verification.\n\n";
            body += "Please login to the registry at:\n\n";
            body += Utils.getConfigValue(ConfigurationKey.URI_PREFIX);
            body += "\n\nand use the \"Pending Approval\" menu item to approve it\n\nThanks.";
            EmailFactory.getEmail().send(email, subject, body);
        }
        return processedBulkUpload;
    }
    return null;
}
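A minimal sketch of how a caller might summarize the ProcessedBulkUpload returned above. The isSuccess and getUserMessage accessors mirror the setters used in this method and are assumptions; getHeaders appears in the snippet itself.

String describe(ProcessedBulkUpload processed) {
    if (processed == null)
        return "bulk upload could not be updated";
    if (processed.isSuccess())
        return "submitted for approval";

    StringBuilder message = new StringBuilder(processed.getUserMessage());
    for (EntryHeaderValue header : processed.getHeaders()) {
        message.append("\n failed field: ").append(header);   // each header wraps a failing EntryFieldLabel
    }
    return message.toString();
}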
use of org.jbei.ice.lib.dto.entry.EntryFieldLabel in project ice by JBEI.
the class BulkUploadEntries method createOrUpdateEntry.
/**
 * Creates (or updates) an entry based on the information in the parameter.
 *
 * @param autoUpdate wrapper for the information used to create the entry
 * @return updated wrapper for the information used to create the entry. Will contain additional
 *         information, such as the unique identifier for the part, if one was created
 */
BulkUploadAutoUpdate createOrUpdateEntry(BulkUploadAutoUpdate autoUpdate) {
    Account account = accountController.getByEmail(userId);

    // for bulk edit, drafts will not exist
    if (autoUpdate.getEditMode() != EditMode.BULK_EDIT) {
        autoUpdate.setBulkUploadId(this.upload.getId());
    }

    // for strain with plasmid this is the strain
    Entry entry = entryDAO.get(autoUpdate.getEntryId());

    // if entry is null, create entry
    if (entry == null) {
        PartData partData = new PartData(autoUpdate.getType());
        partData.setOwner(account.getFullName());
        partData.setOwnerEmail(account.getEmail());
        partData.setCreator(account.getFullName());
        partData.setCreatorEmail(account.getEmail());
        partData = entries.create(partData);
        entry = entryDAO.get(partData.getId());
        autoUpdate.setEntryId(entry.getId());
        if (this.upload != null) {
            this.upload.getContents().add(entry);
        }
    }

    // now update the values (for strain with plasmid, some values apply to both)
    for (Map.Entry<EntryFieldLabel, String> set : autoUpdate.getKeyValue().entrySet()) {
        String value = set.getValue();
        EntryFieldLabel field = set.getKey();
        InfoToModelFactory.infoToEntryForField(entry, new String[]{value}, field);
    }

    // if (this.upload != null && this.upload.getStatus() != BulkUploadStatus.PENDING_APPROVAL) {
    //     if (otherEntry != null && autoUpdate.getEditMode() != EditMode.BULK_EDIT) {
    //         if (otherEntry.getVisibility() == null || otherEntry.getVisibility() != Visibility.DRAFT.getValue())
    //             otherEntry.setVisibility(Visibility.DRAFT.getValue());
    //
    //         updateEntry(otherEntry);
    //     }
    //
    //     if ((entry.getVisibility() == null || entry.getVisibility() != Visibility.DRAFT.getValue())
    //             && autoUpdate.getEditMode() != EditMode.BULK_EDIT)
    //         entry.setVisibility(Visibility.DRAFT.getValue());
    // }
    updateEntry(entry);

    // update bulk upload; even if no new entry was created, entries belonging to it were updated
    if (this.upload != null) {
        this.upload.setLastUpdateTime(new Date());
        autoUpdate.setLastUpdate(this.upload.getLastUpdateTime());
        dao.update(this.upload);
    }
    return autoUpdate;
}
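A sketch of how the wrapper consumed by createOrUpdateEntry might be populated from the same package. The BulkUploadAutoUpdate constructor, the EntryType and EntryFieldLabel constants, and a ready-initialized key/value map are illustrative assumptions; getKeyValue and getEntryId are the accessors the method above relies on.

BulkUploadAutoUpdate buildPlasmidUpdate(BulkUploadEntries uploadEntries) {
    BulkUploadAutoUpdate update = new BulkUploadAutoUpdate(EntryType.PLASMID);   // constructor signature assumed
    update.getKeyValue().put(EntryFieldLabel.NAME, "pExample-1");                // placeholder field values
    update.getKeyValue().put(EntryFieldLabel.SUMMARY, "demo plasmid for a bulk upload draft");

    BulkUploadAutoUpdate result = uploadEntries.createOrUpdateEntry(update);
    // result.getEntryId() now holds the id of the entry that was created or updated
    return result;
}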
use of org.jbei.ice.lib.dto.entry.EntryFieldLabel in project ice by JBEI.
the class BulkZipUpload method processUpload.
/**
 * Processes the zip file. It expects exactly one file with the .csv extension, which means
 * a .csv file cannot be used as an attachment.
 * <p>
 * Also, all dot files are ignored.
 */
public ProcessedBulkUpload processUpload() {
    ProcessedBulkUpload processedBulkUpload = new ProcessedBulkUpload();
    InputStream csvFile = null;
    HashMap<String, InputStream> files = new HashMap<>();

    try (BufferedInputStream bis = new BufferedInputStream(inputStream);
         ZipInputStream zis = new ZipInputStream(bis)) {
        ZipEntry zipEntry;
        while ((zipEntry = zis.getNextEntry()) != null) {
            if (zipEntry.isDirectory())
                continue;

            String name = zipEntry.getName();
            if (name.contains("/"))
                name = name.substring(name.lastIndexOf("/") + 1);

            // ignore all dot files
            if (name.startsWith("."))
                continue;

            int len;
            ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
            byte[] buffer = new byte[1024];
            while ((len = zis.read(buffer)) > 0) {
                byteArrayOutputStream.write(buffer, 0, len);
            }
            zis.closeEntry();

            // get main csv
            if (name.endsWith(".csv")) {
                if (csvFile != null) {
                    processedBulkUpload.setSuccess(false);
                    processedBulkUpload.setUserMessage("Duplicate csv file in zip archive. It should only contain one.");
                    return processedBulkUpload;
                } else {
                    csvFile = new ByteArrayInputStream(byteArrayOutputStream.toByteArray());
                }
            } else {
                files.put(name, new ByteArrayInputStream(byteArrayOutputStream.toByteArray()));
            }
        }
    } catch (IOException e) {
        processedBulkUpload.setSuccess(false);
        processedBulkUpload.setUserMessage(e.getCause() != null ? e.getCause().getMessage() : e.getMessage());
        return processedBulkUpload;
    }

    if (csvFile == null) {
        processedBulkUpload.setSuccess(false);
        processedBulkUpload.setUserMessage("Could not find a csv file in the zip archive");
        return processedBulkUpload;
    }

    try {
        // retrieve the partData and validate
        List<PartWithSample> updates = super.getBulkUploadDataFromFile(csvFile);
        if (updates == null) {
            processedBulkUpload.setSuccess(false);
            processedBulkUpload.setUserMessage("Validation failed");
            for (EntryFieldLabel field : invalidFields) {
                processedBulkUpload.getHeaders().add(new EntryHeaderValue(field));
            }
            return processedBulkUpload;
        }

        // validate files to ensure that for each partData with a file, the file is available
        for (PartWithSample partWithSample : updates) {
            // check sequences
            PartData data = partWithSample.getPartData();
            String sequenceFile = data.getSequenceFileName();
            if (StringUtils.isNotBlank(sequenceFile) && files.get(sequenceFile) == null) {
                processedBulkUpload.setSuccess(false);
                processedBulkUpload.setUserMessage("Sequence file \"" + sequenceFile + "\" not found in the zip archive");
                return processedBulkUpload;
            }

            // check attachments
            String attachmentFile;
            if (data.getAttachments() != null && !data.getAttachments().isEmpty()) {
                attachmentFile = data.getAttachments().get(0).getFilename();
                if (StringUtils.isNotBlank(attachmentFile) && files.get(attachmentFile) == null) {
                    processedBulkUpload.setSuccess(false);
                    processedBulkUpload.setUserMessage("Attachment file \"" + attachmentFile + "\" not found in the zip archive");
                    return processedBulkUpload;
                }
            }
            // todo : trace sequences
        }

        // create actual registry parts
        BulkUploadEntries creator = new BulkUploadEntries(userId, this.uploadId);
        // create entries
        if (!creator.createEntries(updates, files)) {
            String errorMsg = "Error creating entries for upload";
            // todo: delete upload id
            throw new IOException(errorMsg);
        }

        processedBulkUpload.setUploadId(uploadId);
        return processedBulkUpload;
    } catch (IOException e) {
        Logger.error(e);
        processedBulkUpload.setSuccess(false);
        processedBulkUpload.setUserMessage(e.getCause() != null ? e.getCause().getMessage() : e.getMessage());
        return processedBulkUpload;
    }
}
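A sketch, using only java.util.zip and java.nio.file, of assembling an archive in the layout processUpload expects: exactly one .csv plus the sequence and attachment files it references by name. The file names are placeholders.

void buildUploadZip(Path csv, Path sequenceFile, Path target) throws IOException {
    try (ZipOutputStream zos = new ZipOutputStream(Files.newOutputStream(target))) {
        zos.putNextEntry(new ZipEntry("parts.csv"));       // the single csv read by getBulkUploadDataFromFile
        Files.copy(csv, zos);
        zos.closeEntry();

        zos.putNextEntry(new ZipEntry("pExample-1.gb"));   // must match the sequence file name in the csv
        Files.copy(sequenceFile, zos);
        zos.closeEntry();
    }
}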