Use of java.nio.channels.SeekableByteChannel in project gatk by broadinstitute:
class ReadsDataSourceUnitTest, method testCloudBamWithCustomReaderFactoryAndWrappers.
@Test(dataProvider = "cloudXorTestData", groups = { "bucket" })
public void testCloudBamWithCustomReaderFactoryAndWrappers(final List<Path> bams, final List<Path> indices) {
    final SamReaderFactory strictFactory = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.STRICT);
    // The input files are XOR'd with a constant. We use a wrapper to XOR it back.
    // If the code uses the wrong wrapper, or omits one, then the test will fail.
    final Function<SeekableByteChannel, SeekableByteChannel> dataWrapper = XorWrapper.forKey((byte) 74);
    final Function<SeekableByteChannel, SeekableByteChannel> indexWrapper = XorWrapper.forKey((byte) 80);
    try (final ReadsDataSource readsSource = new ReadsDataSource(bams, indices, strictFactory, dataWrapper, indexWrapper)) {
        Assert.assertTrue(readsSource.indicesAvailable(), "Explicitly-provided indices not detected for bams: " + bams);
        // Count the reads returned for the query interval 1:1-300.
        int count = 0;
        for (final Iterator<GATKRead> reads = readsSource.query(new SimpleInterval("1", 1, 300)); reads.hasNext(); ) {
            reads.next();
            count++;
        }
        Assert.assertEquals(count, 2, "Wrong number of reads returned in query");
    }
}
Use of java.nio.channels.SeekableByteChannel in project gatk by broadinstitute:
class GcsNioIntegrationTest, method testCloseWhilePrefetching.
/**
 * Checks that closing a prefetching channel while its prefetch thread may still be
 * active does not throw.
 *
 * @throws Exception if opening or reading the GCS object fails
 */
@Test(groups = { "cloud" })
public void testCloseWhilePrefetching() throws Exception {
    final String large = getGCPTestInputPath() + largeFilePath;
    // try-with-resources ensures the channel is released even if read() throws;
    // the original leaked the channel on any failure before the explicit close().
    try (final SeekableByteChannel chan = new SeekableByteChannelPrefetcher(
            Files.newByteChannel(Paths.get(URI.create(large))), 10 * 1024 * 1024)) {
        // read just 1 byte, get the prefetching going
        final ByteBuffer one = ByteBuffer.allocate(1);
        chan.read(one);
        // closing must not throw an exception, even if the prefetching
        // thread is active; an exception from the implicit close() fails the test.
    }
}
Use of java.nio.channels.SeekableByteChannel in project gatk by broadinstitute:
class SeekableByteChannelPrefetcherTest, method testCloseWhilePrefetching.
/**
 * Checks that closing a prefetching channel while its prefetch thread may still be
 * active does not throw.
 *
 * @throws Exception if opening or reading the input file fails
 */
@Test
public void testCloseWhilePrefetching() throws Exception {
    // try-with-resources ensures the channel is released even if readFully() throws;
    // the original leaked the channel on any failure before the explicit close().
    try (final SeekableByteChannel chan = new SeekableByteChannelPrefetcher(
            Files.newByteChannel(Paths.get(input)), 10 * 1024 * 1024)) {
        // read just 1 byte, get the prefetching going
        final ByteBuffer one = ByteBuffer.allocate(1);
        readFully(chan, one);
        // closing must not throw an exception, even if the prefetching
        // thread is active; an exception from the implicit close() fails the test.
    }
}
Use of java.nio.channels.SeekableByteChannel in project gatk by broadinstitute:
class SeekableByteChannelPrefetcherTest, method testRead.
/**
 * Compares reads through the prefetcher against reads from a plain channel at a
 * range of offsets below, at, and above the 1024-byte prefetch block size.
 *
 * @throws Exception if the input file cannot be opened or read
 */
@Test
public void testRead() throws Exception {
    // try-with-resources closes both channels; the original never closed either,
    // leaking them on every run and on any assertion failure inside testReading().
    try (final SeekableByteChannel chan1 = Files.newByteChannel(Paths.get(input));
         final SeekableByteChannel chan2 = new SeekableByteChannelPrefetcher(
             Files.newByteChannel(Paths.get(input)), 1024)) {
        testReading(chan1, chan2, 0);
        testReading(chan1, chan2, 128);
        testReading(chan1, chan2, 1024);
        testReading(chan1, chan2, 1500);
        testReading(chan1, chan2, 2048);
        testReading(chan1, chan2, 3000);
        testReading(chan1, chan2, 6000);
    }
}
Use of java.nio.channels.SeekableByteChannel in project gatk by broadinstitute:
class ParallelCopyGCSDirectoryIntoHDFSSpark, method readChunkToHdfs.
/**
 * Copies one fixed-size chunk of a GCS file into an HDFS chunk file.
 *
 * <p>The chunk file is named {@code <basename>.chunk.<chunkNum>} under
 * {@code outputDirectory}; the source bytes start at offset
 * {@code chunkSize * chunkNum}.
 *
 * @param inputGCSPathFinal GCS path of the source file
 * @param chunkSize maximum number of bytes to copy into this chunk
 * @param chunkNum zero-based index of the chunk to copy
 * @param outputDirectory HDFS directory that receives the chunk file
 * @return the chunk number paired with the path of the chunk file written
 * @throws GATKException wrapping any IOException, with the input path appended
 */
private static final Tuple2<Integer, String> readChunkToHdfs(final String inputGCSPathFinal, final long chunkSize, final Integer chunkNum, final String outputDirectory) {
    final Path gcsPath = IOUtils.getPath(inputGCSPathFinal);
    // Last name element of the GCS path is the source file's basename.
    final String basename = gcsPath.getName(gcsPath.getNameCount() - 1).toString();
    org.apache.hadoop.fs.Path outputPath = new org.apache.hadoop.fs.Path(outputDirectory);
    final String chunkPath = outputPath + "/" + basename + ".chunk." + chunkNum;
    try (SeekableByteChannel channel = Files.newByteChannel(gcsPath);
    final OutputStream outputStream = new BufferedOutputStream(BucketUtils.createFile(chunkPath))) {
        // Seek to the start offset of this chunk.
        final long start = chunkSize * (long) chunkNum;
        channel.position(start);
        // Buffer capacity is capped at 64 MiB (presumably what SIXTY_FOUR_MIB holds —
        // confirm the constant) so small chunks don't over-allocate direct memory.
        ByteBuffer byteBuffer = ByteBuffer.allocateDirect((int) Math.min(SIXTY_FOUR_MIB, chunkSize));
        long bytesRead = 0;
        while (channel.read(byteBuffer) > 0) {
            // Switch the buffer from fill mode to drain mode.
            byteBuffer.flip();
            // Drain buffered bytes to the output, stopping once the chunk quota is met.
            while (byteBuffer.hasRemaining() && bytesRead < chunkSize) {
                byte b = byteBuffer.get();
                outputStream.write(b);
                bytesRead++;
            }
            if (bytesRead == chunkSize) {
                // Chunk quota met; any bytes still in the buffer belong to the next chunk.
                break;
            }
            if (bytesRead > chunkSize) {
                // Should be unreachable: the inner loop stops at bytesRead == chunkSize.
                throw new GATKException("Encountered an unknown error condition and read too many bytes; output file may be corrupt");
            }
            // Reset the buffer to fill mode for the next read.
            byteBuffer.clear();
        }
    } catch (IOException e) {
        throw new GATKException(e.getMessage() + "; inputGCSPathFinal = " + inputGCSPathFinal, e);
    }
    return new Tuple2<>(chunkNum, chunkPath);
}
Aggregations