Use of org.broadinstitute.hdf5.HDF5File in the project gatk by broadinstitute: the class HDF5LibraryUnitTest, method testCreateLargeMatrix.
@Test
public void testCreateLargeMatrix() {
    // Creates a large PoN of junk values and simply tests that these can be written and read.

    // Make a big, fake set of read counts.
    final int numRows = 2500000;
    final int numCols = 10;
    final double mean = 3e-7;
    final double sigma = 1e-9;

    final RealMatrix bigCounts = createMatrixOfGaussianValues(numRows, numCols, mean, sigma);
    final File tempOutputHD5 = IOUtils.createTempFile("big-ol-", ".hd5");

    // Write the matrix; close() flushes the dataset to disk.
    final HDF5File hdf5File = new HDF5File(tempOutputHD5, HDF5File.OpenMode.CREATE);
    final String hdf5Path = "/test/m";
    hdf5File.makeDoubleMatrix(hdf5Path, bigCounts.getData());
    hdf5File.close();

    // Read it back and verify dimensions and contents.
    // NOTE: the reader must be closed (it was leaked in the original version);
    // try/finally guarantees release even if an assertion fails.
    final HDF5File hdf5FileForReading = new HDF5File(tempOutputHD5, HDF5File.OpenMode.READ_ONLY);
    try {
        final double[][] result = hdf5FileForReading.readDoubleMatrix(hdf5Path);
        final RealMatrix resultAsRealMatrix = new Array2DRowRealMatrix(result);
        // assertEquals gives an informative message on failure, unlike assertTrue(a == b).
        Assert.assertEquals(resultAsRealMatrix.getRowDimension(), numRows);
        Assert.assertEquals(resultAsRealMatrix.getColumnDimension(), numCols);
        // The original built a second, identical Array2DRowRealMatrix from the same
        // array ("readMatrix"); one matrix suffices for the content comparison.
        PoNTestUtils.assertEqualsMatrix(resultAsRealMatrix, bigCounts, false);
    } finally {
        hdf5FileForReading.close();
    }
}
Use of org.broadinstitute.hdf5.HDF5File in the project gatk by broadinstitute: the class HDF5LibraryUnitTest, method testOpenReadOnly.
@Test
public void testOpenReadOnly() {
    // Opening an existing PoN file read-only and closing it again should not throw;
    // try-with-resources performs the close automatically.
    try (final HDF5File ponFile = new HDF5File(TEST_PON)) {
        // No further interaction needed: successful open + close is the test.
    }
}
Use of org.broadinstitute.hdf5.HDF5File in the project gatk by broadinstitute: the class HDF5LibraryUnitTest, method testMakeDouble.
@Test()
public void testMakeDouble() throws IOException {
    // Round-trip a single scalar double through a nested HDF5 group.
    final File testFile = File.createTempFile("hdf5", ".hd5");
    // Clean up the temp file when the JVM exits (the original left it behind).
    testFile.deleteOnExit();

    HDF5File file = new HDF5File(testFile, HDF5File.OpenMode.CREATE);
    file.makeGroup("test-group/double-group");
    Assert.assertTrue(file.makeDouble("test-group/double-group/my-double", 1.1));
    // (Removed leftover debug output: System.err.println(testFile).)
    file.close();

    // The file must be non-empty and must have been modified no later than "now".
    final long time = System.currentTimeMillis();
    Assert.assertTrue(testFile.length() > 0);
    Assert.assertTrue(testFile.lastModified() <= time);

    // Reopen read-only and verify the stored value survived the round trip.
    file = new HDF5File(testFile, HDF5File.OpenMode.READ_ONLY);
    final double theDouble = file.readDouble("test-group/double-group/my-double");
    Assert.assertEquals(theDouble, 1.1);
    file.close();
}
Use of org.broadinstitute.hdf5.HDF5File in the project gatk by broadinstitute: the class HDF5LibraryUnitTest, method testOpenReadOnlyOnBadFile.
@Test(expectedExceptions = HDF5LibException.class)
public void testOpenReadOnlyOnBadFile() {
    // Opening a nonexistent path must throw HDF5LibException from the constructor.
    // try-with-resources mirrors the original new-then-close shape: if the
    // constructor ever succeeded, the handle would still be closed.
    try (final HDF5File badFile = new HDF5File(new File("/tmp/no-file"))) {
        // Unreachable: the constructor is expected to throw before this point.
    }
}
Use of org.broadinstitute.hdf5.HDF5File in the project gatk-protected by broadinstitute: the class AllelicPanelOfNormals, method read.
/**
 * Reads an allelic panel of normals from either an HDF5 file or a tab-separated file;
 * the file type is detected automatically.
 * <p>
 * A tab-separated file must carry the global hyperparameter values alpha and beta in
 * comment lines marked by {@code GLOBAL_ALPHA_COMMENT_STRING} and
 * {@code GLOBAL_BETA_COMMENT_STRING}:
 * </p>
 * <p>
 * #GLOBAL_ALPHA=...<br>
 * #GLOBAL_BETA=...
 * </p>
 * followed by one line of per-site hyperparameter values per site, with column headers
 * as in {@link AllelicPanelOfNormalsTableColumn}:
 * <p>
 * CONTIG \t POSITION \t ALPHA \t BETA
 * </p>
 *
 * A static read method is used rather than a file-taking constructor so that the static
 * {@code EMPTY_PON} can be returned when the allelic panel of normals is absent from an
 * HDF5 file.
 * @param inputFile HDF5 file containing a coverage panel of normals created by {@link CreatePanelOfNormals}
 *                  and an allelic panel of normals created and set by {@link CreateAllelicPanelOfNormals}
 *                  ({@code EMPTY_PON} is returned if the latter was never set), or a
 *                  tab-separated file that contains global hyperparameters in comment lines and lines specifying hyperparameter values at each site
 */
public static AllelicPanelOfNormals read(final File inputFile) {
    IOUtils.canReadFile(inputFile);

    // HDF5 case: delegate all dataset parsing to HDF5AllelicPoNUtils.
    if (isHDF5File(inputFile)) {
        try (final HDF5File hdf5File = new HDF5File(inputFile)) {
            final AllelicPanelOfNormals pon = HDF5AllelicPoNUtils.read(hdf5File);
            logger.info(String.format("Loaded allelic panel of normals from HDF5 file: %s.", inputFile));
            return pon;
        }
    }

    // TSV case: global hyperparameters come from comment lines, per-site values from data lines.
    try (final AllelicPanelOfNormalsReader ponReader = new AllelicPanelOfNormalsReader(inputFile)) {
        final AllelicPanelOfNormals.HyperparameterValues globalValues = parseGlobalHyperparameterValues(inputFile);
        final Map<SimpleInterval, HyperparameterValues> siteToValues = new HashMap<>();
        ponReader.stream().forEach(site -> siteToValues.put(site.getKey(), site.getValue()));
        final AllelicPanelOfNormals pon = new AllelicPanelOfNormals(globalValues, siteToValues);
        logger.info(String.format("Loaded allelic panel of normals from TSV file: %s.", inputFile));
        return pon;
    } catch (final IOException | UncheckedIOException ex) {
        throw new UserException.CouldNotReadInputFile(inputFile, ex);
    }
}
Aggregations