Usage of org.jbei.ice.lib.dto.entry.EntryField in project ice by JBEI: class RemoteEntriesAsCSV, method writeDataEntries.
/**
 * Writes one CSV row per entry for the given remote partner, and streams each
 * entry's sequence (when available) into the zip archive as a GenBank file.
 * <p>
 * Row layout matches {@code getCSVHeaders}: registry url, creation date,
 * part id, one column per requested field, then the sequence file name.
 *
 * @param partner remote registry the entries originate from
 * @param entries parts to write; a {@code null} list is a no-op
 * @param fields  entry fields to emit as columns, in order
 * @param writer  destination for the CSV rows
 * @param zos     destination archive for GenBank sequence files
 */
protected void writeDataEntries(RemotePartner partner, List<PartData> entries, List<EntryField> fields, CSVWriter writer, ZipOutputStream zos) {
    if (entries == null)
        return;

    for (PartData partData : entries) {
        // fixed columns (registry, created, part id, sequence file) plus one per field
        String[] row = new String[fields.size() + 4];
        row[0] = partner.getUrl();
        row[1] = new Date(partData.getCreationTime()).toString();
        row[2] = partData.getPartId();

        int column = 3;
        for (EntryField field : fields) {
            row[column] = PartDataUtil.entryFieldToValue(partData, field);
            column += 1;
        }

        // last column: name of the sequence file added to the zip, or empty
        String sequenceFileName = "";
        if (partData.isHasSequence()) {
            try {
                // fetch the sequence from the remote registry
                FeaturedDNASequence featuredDNASequence =
                        remoteEntries.getPublicEntrySequence(partner.getId(), Long.toString(partData.getId()));
                if (featuredDNASequence != null) {
                    sequenceFileName = partData.getPartId() + ".gb";
                    Sequence sequence = SequenceController.dnaSequenceToSequence(featuredDNASequence);
                    GenbankFormatter genbankFormatter = new GenbankFormatter(sequenceFileName);
                    ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
                    genbankFormatter.format(sequence, byteStream);
                    putZipEntry(new ByteArrayWrapper(byteStream.toByteArray(), sequenceFileName), zos);
                }
            } catch (Exception e) {
                // on any retrieval/formatting failure the row simply has no sequence file
                sequenceFileName = "";
            }
        }
        row[column] = sequenceFileName;
        writer.writeNext(row);
    }
}
Usage of org.jbei.ice.lib.dto.entry.EntryField in project ice by JBEI: class RemoteEntriesAsCSV, method getCSVHeaders.
/**
 * Builds the CSV header row matching the layout produced by
 * {@code writeDataEntries}: registry url, creation date, part id,
 * one label per requested field, then the sequence file column.
 *
 * @param fields entry fields whose labels become the middle columns, in order
 * @return header row of length {@code fields.size() + 4}
 */
protected String[] getCSVHeaders(List<EntryField> fields) {
    String[] headers = new String[fields.size() + 4];
    headers[0] = "Registry";
    headers[1] = "Created";
    headers[2] = "Part ID";

    // field labels occupy the columns between "Part ID" and "Sequence File"
    int index = 3;
    for (EntryField field : fields) {
        headers[index] = field.getLabel();
        index += 1;
    }
    headers[index] = "Sequence File";
    return headers;
}
Usage of org.jbei.ice.lib.dto.entry.EntryField in project ice by JBEI: class FileResource, method downloadCSV.
/**
 * Extracts the csv information, writes it to the temp dir and returns the file uuid.
 * The client is then expected to make another rest call with the uuid in a separate
 * window; this workaround exists because files cannot be downloaded directly using
 * XHR or similar mechanisms.
 */
@POST
@Path("csv")
@Consumes(MediaType.APPLICATION_JSON)
@Produces(MediaType.APPLICATION_JSON)
public Response downloadCSV(@QueryParam("sequenceFormats") final List<String> sequenceFormats, @QueryParam("entryFields") final List<String> fields, EntrySelection selection) {
    String userId = super.requireUserId();
    EntriesAsCSV entriesAsCSV = new EntriesAsCSV(sequenceFormats.toArray(new String[0]));

    // convert the requested field names to EntryField values; on any conversion
    // failure none of the fields are kept (all-or-nothing, matching prior behavior)
    List<EntryField> entryFields = new ArrayList<>();
    try {
        if (fields != null) {
            List<EntryField> converted = new ArrayList<>();
            for (String field : fields)
                converted.add(EntryField.fromString(field));
            entryFields.addAll(converted);
        }
    } catch (Exception e) {
        Logger.error(e);
    }

    boolean success = entriesAsCSV.setSelectedEntries(userId, selection, entryFields.toArray(new EntryField[0]));
    if (!success)
        return super.respond(false);

    final File file = entriesAsCSV.getFilePath().toFile();
    if (file.exists())
        return Response.ok(new Setting("key", file.getName())).build();
    return Response.serverError().build();
}
Usage of org.jbei.ice.lib.dto.entry.EntryField in project ice by JBEI: class EntriesAsCSV, method getEntryFields.
/**
 * Determines the set of entry fields to export for the current selection:
 * the fields common to all entry types, extended with the type-specific
 * headers for each distinct record type found among the entries.
 *
 * @return fields to export, common fields first
 */
protected EntryField[] getEntryFields() {
    // distinct record types present in the selected entries
    Set<String> recordTypes = new HashSet<>(dao.getRecordTypes(entries));
    List<EntryField> fields = EntryFields.getCommonFields();

    for (String recordType : recordTypes) {
        EntryType type = EntryType.nameToType(recordType);
        if (type == null) {
            Logger.error("Could not convert entry type " + recordType);
            continue;
        }

        // only these types carry additional headers; others use the common set
        if (type == EntryType.ARABIDOPSIS)
            EntryFields.addArabidopsisSeedHeaders(fields);
        else if (type == EntryType.STRAIN)
            EntryFields.addStrainHeaders(fields);
        else if (type == EntryType.PLASMID)
            EntryFields.addPlasmidHeaders(fields);
    }
    return fields.toArray(new EntryField[0]);
}
Usage of org.jbei.ice.lib.dto.entry.EntryField in project ice by JBEI: class BulkZipUpload, method processUpload.
/**
 * Process the zip file. It expects that there is exactly one file with the .csv extension.
 * This means that a .csv cannot be used as an attachment
 * <p>
 * Also, all dot files are ignored
 *
 * @return result of processing; on failure {@code isSuccess()} is false and a user
 *         message describes the problem, on success the created upload id is set
 */
public ProcessedBulkUpload processUpload() {
    ProcessedBulkUpload processedBulkUpload = new ProcessedBulkUpload();
    String csvFile = null;
    HashMap<String, InputStream> files = new HashMap<>();

    try {
        // NOTE(review): this ZipFile is never closed; closing it would invalidate the
        // InputStreams kept in "files", which are consumed later by createEntries().
        // Consider restructuring so the archive can be closed deterministically.
        ZipFile zipFile = new ZipFile(zipFilePath.toFile());
        Enumeration<? extends ZipEntry> enumeration = zipFile.entries();

        // go through zip elements
        while (enumeration.hasMoreElements()) {
            ZipEntry zipEntry = enumeration.nextElement();
            // does not go into directories for now
            if (zipEntry.isDirectory())
                continue;

            // strip any directory component from the entry name
            String name = zipEntry.getName();
            if (name.contains("/"))
                name = name.substring(name.lastIndexOf("/") + 1);

            // ignore all dot files
            if (name.startsWith("."))
                continue;

            // get main csv
            if (name.endsWith(".csv")) {
                if (csvFile != null) {
                    processedBulkUpload.setSuccess(false);
                    processedBulkUpload.setUserMessage("Duplicate csv file in zip archive. It should only contain one.");
                    return processedBulkUpload;
                }
                csvFile = Utils.getString(zipFile.getInputStream(zipEntry));
            } else {
                // everything else is treated as a sequence/attachment file keyed by name
                InputStream inputStream = zipFile.getInputStream(zipEntry);
                files.put(name, inputStream);
            }
        }
    } catch (IOException e) {
        processedBulkUpload.setSuccess(false);
        // fix: getCause() can be null for a plain IOException; fall back to the
        // exception's own message (matches the handling in the lower catch block)
        processedBulkUpload.setUserMessage(e.getCause() != null ? e.getCause().getMessage() : e.getMessage());
        return processedBulkUpload;
    }

    if (csvFile == null) {
        processedBulkUpload.setSuccess(false);
        processedBulkUpload.setUserMessage("Could not find a csv file in the zip archive");
        return processedBulkUpload;
    }

    try {
        ByteArrayInputStream inputStream = new ByteArrayInputStream(csvFile.getBytes(StandardCharsets.UTF_8));
        // retrieve the partData and validates
        List<PartWithSample> updates = super.getBulkUploadDataFromFile(inputStream);
        if (updates == null) {
            processedBulkUpload.setSuccess(false);
            processedBulkUpload.setUserMessage("Validation failed");
            for (EntryField field : invalidFields) {
                processedBulkUpload.getHeaders().add(new EntryHeaderValue(false, field));
            }
            return processedBulkUpload;
        }

        // validate files to ensure that for each partData with a file, that the file is available
        for (PartWithSample partWithSample : updates) {
            // check sequences
            PartData data = partWithSample.getPartData();
            String sequenceFile = data.getSequenceFileName();
            if (StringUtils.isNotBlank(sequenceFile) && files.get(sequenceFile) == null) {
                processedBulkUpload.setSuccess(false);
                processedBulkUpload.setUserMessage("Sequence file \"" + sequenceFile + "\" not found in the zip archive");
                return processedBulkUpload;
            }

            // check attachments
            String attachmentFile;
            if (data.getAttachments() != null && !data.getAttachments().isEmpty()) {
                attachmentFile = data.getAttachments().get(0).getFilename();
                if (StringUtils.isNotBlank(attachmentFile) && files.get(attachmentFile) == null) {
                    processedBulkUpload.setSuccess(false);
                    // fix: message previously interpolated sequenceFile instead of attachmentFile
                    processedBulkUpload.setUserMessage("Attachment file \"" + attachmentFile + "\" not found in the zip archive");
                    return processedBulkUpload;
                }
            }
            // todo : trace sequences
        }

        // create actual registry parts
        BulkEntryCreator creator = new BulkEntryCreator();
        long uploadId = creator.createBulkUpload(userId, addType);

        // create entries
        if (!creator.createEntries(userId, uploadId, updates, files)) {
            //todo: delete upload id
            String errorMsg = "Error creating entries for upload";
            throw new IOException(errorMsg);
        }

        processedBulkUpload.setUploadId(uploadId);
        return processedBulkUpload;
    } catch (IOException e) {
        Logger.error(e);
        processedBulkUpload.setSuccess(false);
        processedBulkUpload.setUserMessage(e.getCause() != null ? e.getCause().getMessage() : e.getMessage());
        return processedBulkUpload;
    }
}
Aggregations