Usage of ca.corefacility.bioinformatics.irida.processing.FileProcessorException in the IRIDA project (phac-nml): the GzipFileProcessor class, method processSingleFile.
/**
 * Process a single {@link SequenceFile}: if the file on disk is
 * gzip-compressed, decompress it into a temporary directory, update the
 * entity to point at the decompressed copy and persist it, then optionally
 * delete the original compressed file.
 *
 * @param sequenceFile
 *            file to process
 * @throws FileProcessorException
 *             if an error occurs while processing
 */
public void processSingleFile(SequenceFile sequenceFile) throws FileProcessorException {
	Path file = sequenceFile.getFile();
	String nameWithoutExtension = file.getFileName().toString();

	// strip the extension from the filename (if necessary)
	if (nameWithoutExtension.endsWith(GZIP_EXTENSION)) {
		nameWithoutExtension = nameWithoutExtension.substring(0, nameWithoutExtension.lastIndexOf(GZIP_EXTENSION));
	}

	try {
		logger.trace("About to try handling a gzip file.");
		if (isCompressed(file)) {
			// rename the file on disk so the compressed copy clearly carries the .gz extension
			file = addExtensionToFilename(file, GZIP_EXTENSION);

			try (GZIPInputStream zippedInputStream = new GZIPInputStream(Files.newInputStream(file))) {
				logger.trace("Handling gzip compressed file.");

				Path targetDirectory = Files.createTempDirectory(null);
				Path target = targetDirectory.resolve(nameWithoutExtension);
				logger.debug("Target directory is [" + targetDirectory + "]");
				logger.debug("Writing uncompressed file to [" + target + "]");

				Files.copy(zippedInputStream, target);

				sequenceFile.setFile(target);
				sequenceFile = sequenceFileRepository.save(sequenceFile);

				if (removeCompressedFile) {
					logger.debug("Removing original compressed files [file.processing.decompress.remove.compressed.file=true]");
					try {
						Files.delete(file);
					} catch (final Exception e) {
						logger.error("Failed to remove the original compressed file.", e);
						// rethrow so the outer try/catch converts this to a FileProcessorException
						throw e;
					}
				}
			}
		}
	} catch (Exception e) {
		logger.error("Failed to process the input file [" + sequenceFile + "]; stack trace follows.", e);
		// preserve the original exception as the cause so the stack trace is not lost
		// NOTE(review): assumes FileProcessorException has a (String, Throwable) constructor — confirm
		throw new FileProcessorException("Failed to process input file [" + sequenceFile + "].", e);
	}
}
Usage of ca.corefacility.bioinformatics.irida.processing.FileProcessorException in the IRIDA project (phac-nml): the DefaultFileProcessingChainTest class, method testFailWithContinueChain.
@Test
public void testFailWithContinueChain() throws FileProcessorTimeoutException {
	// build a chain around a processor that fails but allows the chain to continue
	FileProcessingChain fileProcessingChain = new DefaultFileProcessingChain(objectRepository, qcRepository,
			new FailingFileProcessor());
	when(objectRepository.exists(objectId)).thenReturn(true);

	List<Exception> ignoredExceptions = fileProcessingChain.launchChain(1L);

	// exceptions should be ignored in this test: recorded in the result, not thrown
	assertEquals("exactly one exception should have been ignored.", 1, ignoredExceptions.size());
	assertTrue("ignored exception should be of type FileProcessorException.",
			ignoredExceptions.get(0) instanceof FileProcessorException);
}
Usage of ca.corefacility.bioinformatics.irida.processing.FileProcessorException in the IRIDA project (phac-nml): the DefaultFileProcessingChainTest class, method testFailWriteQCEntry.
@Test
public void testFailWriteQCEntry() throws FileProcessorTimeoutException {
	// build a chain around a processor whose failure aborts the chain
	FileProcessingChain fileProcessingChain = new DefaultFileProcessingChain(objectRepository, qcRepository,
			new FailingFileProcessorNoContinue());
	when(objectRepository.exists(objectId)).thenReturn(true);

	boolean sawFileProcessorException = false;
	try {
		fileProcessingChain.launchChain(1L);
	} catch (FileProcessorException e) {
		sawFileProcessorException = true;
	}
	assertTrue("File process should have thrown exception", sawFileProcessorException);

	// the failure must have been persisted as a QC entry for the sequencing object
	ArgumentCaptor<QCEntry> qcEntryCaptor = ArgumentCaptor.forClass(QCEntry.class);
	verify(qcRepository).save(qcEntryCaptor.capture());
	assertEquals("should have saved qc entry for sample", seqObject,
			qcEntryCaptor.getValue().getSequencingObject());
}
Aggregations