use of org.jbei.ice.lib.dto.entry.PartData in project ice by JBEI.
the class CollectionEntries method getAvailableEntries.
/**
* Retrieves the entries available to the user. "Availability" is determined by any permissions set on the entries.
*
* @param field sort field
* @param asc sort order
* @param offset paging start
* @param limit maximum number of entries to retrieve
* @param filter optional text to filter entries by
* @return wrapper around list of parts that conform to the parameters and the maximum number
* of such entries that are available
*/
protected Results<PartData> getAvailableEntries(ColumnField field, boolean asc, int offset, int limit, String filter) {
    VisibleEntries visibleEntries = new VisibleEntries(userId);
    List<PartData> entries = visibleEntries.getEntries(field, asc, offset, limit, filter);
    long count = visibleEntries.getEntryCount(filter);
    Results<PartData> results = new Results<>();
    results.setResultCount(count);
    results.setData(entries);
    return results;
}
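A minimal caller sketch (not part of the ICE codebase) showing how the returned Results<PartData> wrapper might be consumed for paging; it assumes Results exposes getResultCount() and getData() mirroring the setters above, and that ColumnField.CREATED is a valid sort field.
// Hypothetical caller; getters and the ColumnField value are assumptions, not confirmed API.
Results<PartData> page = getAvailableEntries(ColumnField.CREATED, false, 0, 50, null);
Logger.info(page.getResultCount() + " entries visible to " + userId);
for (PartData part : page.getData()) {
    Logger.info("visible entry id: " + part.getId());
}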
use of org.jbei.ice.lib.dto.entry.PartData in project ice by JBEI.
the class Collection method getBulkUploadFolder.
/**
* Retrieves a bulk import and its associated entries, referenced by the id parameter.
* Only owners or administrators are allowed to retrieve bulk imports.
*
* @param id unique identifier for bulk import
* @param offset offset for upload entries (start)
* @param limit maximum number of entries to return with the upload
* @return data transfer object with the retrieved bulk import data and associated entries
* @throws PermissionException if the requesting user is neither an owner nor an administrator of the bulk import
*/
protected AbstractFolder getBulkUploadFolder(long id, int offset, int limit) {
    BulkUploadDAO uploadDAO = DAOFactory.getBulkUploadDAO();
    BulkUploadAuthorization authorization = new BulkUploadAuthorization();
    BulkUpload draft = uploadDAO.get(id);
    if (draft == null)
        return null;

    Account account = DAOFactory.getAccountDAO().getByEmail(userId);
    authorization.expectRead(account.getEmail(), draft);

    // retrieve the entries associated with the bulk import
    BulkUploadInfo info = draft.toDataTransferObject();
    List<Entry> list = uploadDAO.retrieveDraftEntries(id, offset, limit);
    for (Entry entry : list) {
        PartData partData = setFileData(userId, entry, ModelToInfoFactory.getInfo(entry));
        // check if any links and convert
        if (!entry.getLinkedEntries().isEmpty()) {
            Entry linked = (Entry) entry.getLinkedEntries().toArray()[0];
            PartData linkedData = partData.getLinkedParts().remove(0);
            linkedData = setFileData(userId, linked, linkedData);
            partData.getLinkedParts().add(linkedData);
        }
        info.getEntryList().add(partData);
    }
    info.setCount(uploadDAO.retrieveSavedDraftCount(id));
    return info;
}
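A hedged consumer sketch (not from the project): iterating the returned BulkUploadInfo. It assumes BulkUploadInfo is the concrete AbstractFolder returned here and that getCount() mirrors the setCount() call above; the uploadId variable is a placeholder.
// Hypothetical caller; getCount() and the instanceof relationship are assumptions.
AbstractFolder folder = getBulkUploadFolder(uploadId, 0, 20);
if (folder instanceof BulkUploadInfo) {
    BulkUploadInfo uploadInfo = (BulkUploadInfo) folder;
    Logger.info("Bulk upload has " + uploadInfo.getCount() + " entries in total");
    for (PartData partData : uploadInfo.getEntryList()) {
        Logger.info("  entry: " + partData.getId());
    }
}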
use of org.jbei.ice.lib.dto.entry.PartData in project ice by JBEI.
the class BulkZipUpload method processUpload.
/**
* Processes the zip file. It expects that there is exactly one file with the .csv extension,
* which means that a .csv file cannot be used as an attachment.
* <p>
* Also, all dot files are ignored.
*/
public ProcessedBulkUpload processUpload() {
    ProcessedBulkUpload processedBulkUpload = new ProcessedBulkUpload();
    String csvFile = null;
    HashMap<String, InputStream> files = new HashMap<>();

    try {
        ZipFile zipFile = new ZipFile(zipFilePath.toFile());
        Enumeration<? extends ZipEntry> enumeration = zipFile.entries();

        // go through zip elements
        while (enumeration.hasMoreElements()) {
            ZipEntry zipEntry = enumeration.nextElement();

            // does not go into directories for now
            if (zipEntry.isDirectory())
                continue;

            String name = zipEntry.getName();
            if (name.contains("/"))
                name = name.substring(name.lastIndexOf("/") + 1);

            // ignore all dot files
            if (name.startsWith("."))
                continue;

            // get main csv
            if (name.endsWith(".csv")) {
                if (csvFile != null) {
                    processedBulkUpload.setSuccess(false);
                    processedBulkUpload.setUserMessage("Duplicate csv file in zip archive. It should only contain one.");
                    return processedBulkUpload;
                }
                csvFile = Utils.getString(zipFile.getInputStream(zipEntry));
            } else {
                InputStream inputStream = zipFile.getInputStream(zipEntry);
                files.put(name, inputStream);
            }
        }
    } catch (IOException e) {
        processedBulkUpload.setSuccess(false);
        processedBulkUpload.setUserMessage(e.getCause() != null ? e.getCause().getMessage() : e.getMessage());
        return processedBulkUpload;
    }

    if (csvFile == null) {
        processedBulkUpload.setSuccess(false);
        processedBulkUpload.setUserMessage("Could not find a csv file in the zip archive");
        return processedBulkUpload;
    }

    try {
        ByteArrayInputStream inputStream = new ByteArrayInputStream(csvFile.getBytes(StandardCharsets.UTF_8));

        // retrieve the part data and validate
        List<PartWithSample> updates = super.getBulkUploadDataFromFile(inputStream);
        if (updates == null) {
            processedBulkUpload.setSuccess(false);
            processedBulkUpload.setUserMessage("Validation failed");
            for (EntryField field : invalidFields) {
                processedBulkUpload.getHeaders().add(new EntryHeaderValue(false, field));
            }
            return processedBulkUpload;
        }

        // validate files to ensure that each part data entry referencing a file has that file available
        for (PartWithSample partWithSample : updates) {
            // check sequences
            PartData data = partWithSample.getPartData();
            String sequenceFile = data.getSequenceFileName();
            if (StringUtils.isNotBlank(sequenceFile) && files.get(sequenceFile) == null) {
                processedBulkUpload.setSuccess(false);
                processedBulkUpload.setUserMessage("Sequence file \"" + sequenceFile + "\" not found in the zip archive");
                return processedBulkUpload;
            }

            // check attachments
            String attachmentFile;
            if (data.getAttachments() != null && !data.getAttachments().isEmpty()) {
                attachmentFile = data.getAttachments().get(0).getFilename();
                if (StringUtils.isNotBlank(attachmentFile) && files.get(attachmentFile) == null) {
                    processedBulkUpload.setSuccess(false);
                    processedBulkUpload.setUserMessage("Attachment file \"" + attachmentFile + "\" not found in the zip archive");
                    return processedBulkUpload;
                }
            }
            // todo : trace sequences
        }

        // create actual registry parts
        BulkEntryCreator creator = new BulkEntryCreator();
        long uploadId = creator.createBulkUpload(userId, addType);

        // create entries
        if (!creator.createEntries(userId, uploadId, updates, files)) {
            String errorMsg = "Error creating entries for upload";
            throw new IOException(errorMsg);
            //todo: delete upload id
        }

        processedBulkUpload.setUploadId(uploadId);
        return processedBulkUpload;
    } catch (IOException e) {
        Logger.error(e);
        processedBulkUpload.setSuccess(false);
        processedBulkUpload.setUserMessage(e.getCause() != null ? e.getCause().getMessage() : e.getMessage());
        return processedBulkUpload;
    }
}
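A usage sketch for the result object (an assumed caller, not project code); the getters are presumed to mirror the setters used in processUpload above.
// Hypothetical caller; isSuccess(), getUserMessage() and getUploadId() are assumed getters.
ProcessedBulkUpload result = processUpload();
if (!result.isSuccess()) {
    Logger.error("Bulk zip upload failed: " + result.getUserMessage());
} else {
    Logger.info("Created bulk upload with id " + result.getUploadId());
}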
use of org.jbei.ice.lib.dto.entry.PartData in project ice by JBEI.
the class TransferTask method execute.
public void execute() {
    RemoteTransfer transfer = new RemoteTransfer();
    Account account = DAOFactory.getAccountDAO().getByEmail(userId);
    if (account.getType() != AccountType.ADMIN)
        return;

    Entries retriever = new Entries(account.getEmail());
    List<Long> entries = retriever.getEntriesFromSelectionContext(entrySelection);
    Logger.info(userId + ": requesting transfer to " + remoteId);
    List<PartData> dataList = transfer.getPartsForTransfer(entries);
    List<Long> remoteIds = transfer.transferEntries(remoteId, dataList);

    // check folder
    if (StringUtils.isEmpty(this.entrySelection.getFolderId()))
        return;

    // create remote folder
    long folderId = Long.decode(this.entrySelection.getFolderId());
    Folder folder = DAOFactory.getFolderDAO().get(folderId);
    Logger.info("Adding " + remoteIds.size() + " transferred entries to remote folder");
    transfer.transferFolder(remoteId, folder.toDataTransferObject(), remoteIds);
}
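A defensive variant of the folder lookup above (a sketch, not the project's code): since DAO get calls elsewhere in these snippets can return null, the folder could be guarded before conversion.
// Hypothetical guard; assumes FolderDAO.get(folderId) may return null, as other DAO lookups here do.
Folder folder = DAOFactory.getFolderDAO().get(folderId);
if (folder == null) {
    Logger.error("Folder " + folderId + " from the selection context was not found; skipping folder transfer");
    return;
}
transfer.transferFolder(remoteId, folder.toDataTransferObject(), remoteIds);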
use of org.jbei.ice.lib.dto.entry.PartData in project ice by JBEI.
the class RemoteTransfer method transferEntries.
/**
* Performs the transfer of the entry objects to the remote partner specified.
* It is the responsibility of the destination to ensure that the hierarchical relationship is reconstructed.
*
* @param remoteId unique identifier for remote partner the parts are to be transferred to
* @param entries list of entries to be transferred. Note that the entries contain the linked
* entries as well and these may or may not already exist on the recipient
* @return list of ids of the transferred entries. These are the ids on the remote recipient, not on this ICE instance
*/
public List<Long> transferEntries(long remoteId, List<PartData> entries) {
    RemotePartner partner = this.remotePartnerDAO.get(remoteId);
    if (partner == null)
        throw new IllegalArgumentException("Invalid remote host id: " + remoteId);

    int exceptionCount = 0;
    String url = partner.getUrl();
    List<Long> remoteIds = new LinkedList<>();

    for (PartData data : entries) {
        try {
            PartData object = remoteContact.transferPart(url, data);
            if (object == null) {
                exceptionCount += 1;
                continue;
            }

            remoteIds.add(object.getId());
            if (data.getLinkedParts() != null) {
                remoteIds.addAll(object.getLinkedParts().stream().map(PartData::getId).collect(Collectors.toList()));
            }

            // transfers attachments and sequences
            performTransfer(partner, data);
        } catch (Exception e) {
            exceptionCount += 1;
            if (exceptionCount >= 5) {
                Logger.error(e);
                Logger.error(exceptionCount + " exceptions encountered during transfer. Aborting");
                return null;
            }
            Logger.error(e);
        }
    }
    return remoteIds;
}
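Because transferEntries returns null when it aborts after repeated exceptions, a caller-side sketch (an assumption about intended usage, not project code) would check for that before using the list.
// Hypothetical caller; guards against the null return documented by the method above.
List<Long> remoteIds = transfer.transferEntries(remoteId, dataList);
if (remoteIds == null) {
    Logger.error("Transfer to remote partner " + remoteId + " aborted after repeated failures");
    return;
}
Logger.info("Transferred " + remoteIds.size() + " entries to partner " + remoteId);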