Use of org.jbei.ice.lib.dto.FeaturedDNASequence in project ice by JBEI.
Class PartSequence, method save.
/**
 * Creates a new sequence record from the given wrapper and associates it with this entry.
 * <p>
 * The caller must have write permission on the entry, and the entry must not already
 * have a sequence associated with it.
 *
 * @param dnaSequence wrapper around the sequence information to persist
 * @throws IllegalArgumentException if the entry already has an associated sequence
 */
public void save(FeaturedDNASequence dnaSequence) {
    entryAuthorization.expectWrite(userId, entry);

    // refuse to overwrite: an entry may have at most one sequence; existing one must be deleted first
    if (sequenceDAO.getByEntry(this.entry) != null)
        throw new IllegalArgumentException("Entry already has a sequence associated with it. Please delete first");

    // convert sequence wrapper to sequence storage model; a null result means nothing to save
    Sequence sequence = SequenceUtil.dnaSequenceToSequence(dnaSequence);
    if (sequence == null)
        return;

    saveSequenceObject(sequence);
}
Use of org.jbei.ice.lib.dto.FeaturedDNASequence in project ice by JBEI.
Class PartSequence, method parseSequenceFile.
/**
 * Parses a sequence in a file and associates it with the current entry.
 *
 * @param inputStream      input stream of bytes representing the file
 * @param fileName         name of file being parsed
 * @param extractHierarchy for SBOL2 sequences only. If set to <code>true</code>, creates a hierarchy of ICE entries
 *                         as needed
 * @return wrapper around the internal model used to represent sequence information,
 *         or <code>null</code> if the file contents could not be parsed
 * @throws IOException on Exception parsing the contents of the file
 */
public SequenceInfo parseSequenceFile(InputStream inputStream, String fileName, boolean extractHierarchy) throws IOException {
    AbstractParser parser;

    // write sequence file to disk (tmp)
    String tmpDir = new ConfigurationSettings().getPropertyValue(ConfigurationKey.TEMPORARY_DIRECTORY);
    if (StringUtils.isEmpty(tmpDir))
        throw new IllegalArgumentException("Cannot parse sequence without valid tmp directory");
    Path tmpPath = Paths.get(tmpDir);
    if (!Files.isDirectory(tmpPath) || !Files.isWritable(tmpPath))
        throw new IllegalArgumentException("Cannot write to tmp directory: " + tmpPath.toString());

    Path sequencePath = Paths.get(tmpPath.toString(), UUID.randomUUID().toString() + "-" + fileName);
    Files.copy(inputStream, sequencePath, StandardCopyOption.REPLACE_EXISTING);

    // detect sequence format from the first line of the file
    SequenceFormat format;
    try (InputStream fileInputStream = Files.newInputStream(sequencePath);
         LineIterator iterator = IOUtils.lineIterator(fileInputStream, StandardCharsets.UTF_8)) {
        if (!iterator.hasNext())
            throw new IOException("Cannot read stream for " + fileName);
        String firstLine = iterator.next();
        format = SequenceUtil.detectFormat(firstLine);
    }

    try {
        // special handling for sbol format
        if (format == SBOL2) {
            SBOLParser sbolParser = new SBOLParser(this.userId, Long.toString(this.entry.getId()), extractHierarchy);
            // close the stream once parsing completes (previously leaked)
            try (InputStream sbolStream = Files.newInputStream(sequencePath)) {
                return sbolParser.parseToEntry(sbolStream, fileName);
            }
        }

        switch (format) {
            case GENBANK:
                parser = new GenBankParser();
                break;
            case FASTA:
                parser = new FastaParser();
                break;
            default:
            case PLAIN:
                parser = new PlainParser();
                break;
        }

        SequenceFile sequenceFile = new SequenceFile();
        String entryType = this.entry.getRecordType();

        // parse the file contents; close the backing stream when done (previously leaked)
        FeaturedDNASequence dnaSequence;
        try (InputStream parseStream = Files.newInputStream(sequencePath);
             LineIterator iterator = IOUtils.lineIterator(parseStream, StandardCharsets.UTF_8)) {
            dnaSequence = parser.parse(iterator, entryType);
        }

        Sequence sequence = SequenceUtil.dnaSequenceToSequence(dnaSequence);
        if (sequence == null)
            throw new IOException("Could not create sequence object");

        // copy original sequence file to file system
        try {
            Files.copy(sequencePath, sequenceFile.getFilePath(), StandardCopyOption.REPLACE_EXISTING);
            sequence.setSequenceUser(sequenceFile.getFileName());
        } catch (Exception e) {
            // ok to ignore. Can get back sequence as long as sequence object is saved. cannot download original
            Logger.warn("Exception writing sequence to file: " + e.getMessage());
        }

        sequence.setFileName(fileName);
        sequence.setFormat(format);
        sequence = saveSequenceObject(sequence);

        SequenceInfo info = sequence.toDataTransferObject();
        info.setSequence(dnaSequence);
        return info;
    } catch (InvalidFormatParserException ifpe) {
        Logger.error(ifpe);
        return null;
    } finally {
        // always remove the temporary copy of the uploaded file
        Files.deleteIfExists(sequencePath);
    }
}
Use of org.jbei.ice.lib.dto.FeaturedDNASequence in project ice by JBEI.
Class PartSequence, method getFeaturedSequence.
/**
 * Builds the data-transfer representation of the sequence associated with the given entry.
 *
 * @param entry              entry whose sequence is being retrieved
 * @param canEdit            whether the requesting user may modify the sequence
 * @param includeAllFeatures when true, all sequence features are included; otherwise only the first 20
 * @return the featured sequence wrapper, or null if the entry has no associated sequence
 */
private FeaturedDNASequence getFeaturedSequence(Entry entry, boolean canEdit, boolean includeAllFeatures) {
    Sequence sequence = sequenceDAO.getByEntry(entry);
    if (sequence == null)
        return null;

    // fetch either every annotation or just the first page of 20
    List<SequenceFeature> features = includeAllFeatures
            ? sequenceFeatureDAO.getEntrySequenceFeatures(entry)
            : sequenceFeatureDAO.pageSequenceFeatures(entry, 0, 20);

    FeaturedDNASequence result = SequenceUtil.sequenceToDNASequence(sequence, features);
    result.setCanEdit(canEdit);
    result.setIdentifier(entry.getPartNumber());

    // attach a resolvable URI when the instance has a configured prefix
    Configuration configuration = DAOFactory.getConfigurationDAO().get(ConfigurationKey.URI_PREFIX);
    if (configuration != null)
        result.setUri(configuration.getValue() + "/entry/" + entry.getId());

    return result;
}
Use of org.jbei.ice.lib.dto.FeaturedDNASequence in project ice by JBEI.
Class Sequences, method parseSequence.
/**
 * Parses the sequence contained in the input stream without associating it with an entry.
 * <p>
 * SBOL files (detected by extension) are not supported here and return <code>null</code>.
 *
 * @param inputStream stream containing the raw sequence text
 * @param fileName    name of the file the stream was read from; used for format hints and record keeping
 * @return wrapper around the parsed sequence, or <code>null</code> for SBOL input
 * @throws InvalidFormatParserException if the contents cannot be parsed or the stream cannot be read
 */
public SequenceInfo parseSequence(InputStream inputStream, String fileName) throws InvalidFormatParserException {
    int dotIndex = fileName.lastIndexOf('.');
    if (dotIndex != -1) {
        String ext = fileName.substring(dotIndex + 1);
        // unique case for sbol since it can result in multiple entries created
        if ("rdf".equalsIgnoreCase(ext) || "xml".equalsIgnoreCase(ext) || "sbol".equalsIgnoreCase(ext)) {
            // todo : cannot parse sbol yet
            return null;
        }
    }

    // parse actual sequence
    try {
        String sequenceString = IOUtils.toString(inputStream, Charset.defaultCharset());
        FeaturedDNASequence dnaSequence = GeneralParser.parse(sequenceString);
        if (dnaSequence == null)
            throw new InvalidFormatParserException("Could not parse sequence string");

        Sequence sequence = SequenceUtil.dnaSequenceToSequence(dnaSequence);
        // conversion can fail even after a successful parse (see null checks in PartSequence);
        // guard against the NPE that calling setSequenceUser on null would cause
        if (sequence == null)
            throw new InvalidFormatParserException("Could not create sequence from parsed data");

        sequence.setSequenceUser(sequenceString);
        if (!StringUtils.isBlank(fileName))
            sequence.setFileName(fileName);

        SequenceInfo info = sequence.toDataTransferObject();
        info.setSequence(dnaSequence);
        return info;
    } catch (IOException e) {
        throw new InvalidFormatParserException(e);
    }
}
Use of org.jbei.ice.lib.dto.FeaturedDNASequence in project ice by JBEI.
Class Sequences, method bulkUpdate.
/**
 * Bulk update sequences based on uploaded zip file
 * containing sequences, where the sequence name is the (unique) part number of the entry.
 * If there is an existing sequence associated with the entry, it is deleted.
 *
 * @param userId          userId of user making request; must be an administrator
 * @param fileInputStream input stream of zip file
 * @return names of zip entries that could not be matched to an ICE entry or parsed
 * @throws IOException         on error reading the zip stream
 * @throws PermissionException if the user does not have administrative privileges
 */
public List<String> bulkUpdate(String userId, InputStream fileInputStream) throws IOException {
    if (!new AccountController().isAdministrator(userId))
        throw new PermissionException("Must have admin privileges to use this feature");

    List<String> errors = new ArrayList<>();
    Logger.info("Starting bulk update of sequences");

    try (ZipInputStream stream = new ZipInputStream(fileInputStream)) {
        ZipEntry zipEntry;
        while ((zipEntry = stream.getNextEntry()) != null) {
            if (zipEntry.isDirectory())
                continue;

            // strip any folder path from the entry name
            String name = zipEntry.getName();
            if (name.contains("/"))
                name = name.substring(name.lastIndexOf("/") + 1);

            // skip hidden/metadata files (e.g. ".DS_Store", "__MACOSX" artifacts)
            if (name.startsWith(".") || name.startsWith("_"))
                continue;

            // read the complete zip entry into memory
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            int len;
            byte[] data = new byte[1024];
            while ((len = stream.read(data)) > 0) {
                out.write(data, 0, len);
            }
            stream.closeEntry();

            // part number is the file name without its extension; guard against
            // names with no '.' (previously threw StringIndexOutOfBoundsException)
            int dotIndex = name.indexOf('.');
            String entryName = (dotIndex == -1) ? name : name.substring(0, dotIndex);
            Entry entry = DAOFactory.getEntryDAO().getByPartNumber(entryName);
            if (entry == null) {
                errors.add(name);
                continue;
            }

            // decode explicitly as UTF-8 rather than relying on the platform default charset
            String sequenceString = new String(out.toByteArray(), StandardCharsets.UTF_8);
            FeaturedDNASequence dnaSequence = GeneralParser.parse(sequenceString);
            if (dnaSequence == null) {
                Logger.error("Could not parse sequence for " + name);
                errors.add(name);
                continue;
            }

            Logger.info("Updating sequence for entry " + entry.getPartNumber() + " as part of bulk update");
            PartSequence partSequence = new PartSequence(userId, entry.getPartNumber());
            partSequence.delete();
            partSequence.save(dnaSequence);
        }
    }

    Logger.info("Completed bulk upload of sequences with " + errors.size() + " errors");
    return errors;
}
Aggregations