Use of org.broadinstitute.hellbender.utils.tsv.TableColumnCollection in the broadinstitute/gatk project:
class PCATangentNormalizationResult, method writeTangentBetaHats.
/**
 * Writes the tangent-normalization beta-hats to a TSV file.
 *
 * <p>The first column is the row index ({@code PON_SAMPLE_BETA_HAT_COLUMN_NAME}); the remaining
 * columns are the count column names from {@code targetFactorNormalizedCounts}. One record is
 * written per row of {@code tangentBetaHats}.
 *
 * @param file        output file; if {@code null}, nothing is written
 * @param commandLine command line to record in the file's header comments
 * @throws UserException.CouldNotCreateOutputFile if the file cannot be written
 */
private void writeTangentBetaHats(final File file, final String commandLine) {
    if (file == null) {
        return;
    }
    final List<String> countColumnNames = targetFactorNormalizedCounts.columnNames();
    final List<String> columnNames = new ArrayList<>(countColumnNames.size() + 1);
    columnNames.add(PON_SAMPLE_BETA_HAT_COLUMN_NAME);
    columnNames.addAll(countColumnNames);
    final TableColumnCollection columns = new TableColumnCollection(columnNames);
    try (final TableWriter<Integer> writer = TableUtils.writer(file, columns,
            (i, dataLine) -> dataLine.append(Integer.toString(i)).append(tangentBetaHats.getRow(i)))) {
        writer.writeComment("fileFormat = tsv");
        writer.writeComment("commandLine = " + commandLine);
        writer.writeComment("title = Tangent normalization Beta Hats");
        for (int i = 0; i < tangentBetaHats.getRowDimension(); i++) {
            writer.writeRecord(i);
        }
    } catch (final IOException ex) {
        // Pass the exception itself (not just its message) so the cause and stack trace are preserved.
        throw new UserException.CouldNotCreateOutputFile(file, ex);
    }
}
Use of org.broadinstitute.hellbender.utils.tsv.TableColumnCollection in the broadinstitute/gatk project:
class ReadCountRecordUnitTest, method testAppendCountsToBeyondEnd.
/**
 * Checks that appending counts when the data line is already positioned past its last
 * column fails with an {@link IllegalStateException}.
 */
@Test(dataProvider = "testNonZeroCountsData", dependsOnMethods = "testAppendCountsTo", expectedExceptions = IllegalStateException.class)
public void testAppendCountsToBeyondEnd(@SuppressWarnings("unused") final String testName, final BiFunction<Target, double[], ? extends ReadCountRecord> constructor, final int size) {
    final double[] counts = generateCounts(size);
    final ReadCountRecord record = constructor.apply(TEST_TARGET, counts);
    // 10 pre-padding columns, the count columns, then 10 post-padding columns.
    final List<String> columnNames = Stream.concat(
            Stream.concat(
                    IntStream.range(0, 10).mapToObj(i -> "pre-padding_" + i),
                    IntStream.range(0, counts.length).mapToObj(i -> "column_" + i)),
            IntStream.range(0, 10).mapToObj(i -> "post-padding_" + i))
            .collect(Collectors.toList());
    final TableColumnCollection columns = new TableColumnCollection(columnNames);
    final DataLine dataLine = new DataLine(columns, RuntimeException::new);
    // Seek past the last column so that the append below must throw.
    dataLine.seek(columnNames.size());
    record.appendCountsTo(dataLine);
}
Use of org.broadinstitute.hellbender.utils.tsv.TableColumnCollection in the broadinstitute/gatk project:
class ReadCountRecordUnitTest, method testAppendCountsTo.
/**
 * Checks that {@code appendCountsTo} writes the record's counts into the data line at the
 * current position and leaves the surrounding (padding) columns untouched.
 */
@Test(dataProvider = "testData", dependsOnMethods = "testCreation")
public void testAppendCountsTo(@SuppressWarnings("unused") final String testName, final BiFunction<Target, double[], ? extends ReadCountRecord> constructor, final int size) {
    final int padding = 10;  // number of sentinel columns on each side of the counts
    final double[] counts = generateCounts(size);
    // long[]-backed records store rounded counts, so expected values must be rounded too.
    final boolean round = testName.equals("long[]");
    final ReadCountRecord record = constructor.apply(TEST_TARGET, counts);
    final List<String> columnNames = Stream.concat(
            Stream.concat(
                    IntStream.range(0, padding).mapToObj(i -> "pre-padding_" + i),
                    IntStream.range(0, counts.length).mapToObj(i -> "column_" + i)),
            IntStream.range(0, padding).mapToObj(i -> "post-padding_" + i))
            .collect(Collectors.toList());
    final TableColumnCollection columns = new TableColumnCollection(columnNames);
    final DataLine dataLine = new DataLine(columns, RuntimeException::new);
    // Fill every column with a sentinel value, then overwrite the middle with the counts.
    for (int i = 0; i < 2 * padding + counts.length; i++) {
        dataLine.append("-11");
    }
    dataLine.seek(padding);
    record.appendCountsTo(dataLine);
    // Check the copied values.
    if (!round) {
        for (int i = 0; i < counts.length; i++) {
            Assert.assertEquals(dataLine.getDouble(padding + i), counts[i], 0.0);
        }
    } else {
        for (int i = 0; i < counts.length; i++) {
            Assert.assertEquals(dataLine.getDouble(padding + i), Math.round(counts[i]), 0.00001);
        }
    }
    // Check that the padding remains intact:
    for (int i = 0; i < padding; i++) {
        Assert.assertEquals(dataLine.get(i), "-11");
    }
    for (int i = counts.length + padding; i < counts.length + 2 * padding; i++) {
        Assert.assertEquals(dataLine.get(i), "-11");
    }
}
Use of org.broadinstitute.hellbender.utils.tsv.TableColumnCollection in the broadinstitute/gatk-protected project:
class PCATangentNormalizationResult, method writeTangentBetaHats.
/**
 * Writes the tangent-normalization beta-hats to a TSV file.
 *
 * <p>The first column is the row index ({@code PON_SAMPLE_BETA_HAT_COLUMN_NAME}); the remaining
 * columns are the count column names from {@code targetFactorNormalizedCounts}. One record is
 * written per row of {@code tangentBetaHats}.
 *
 * @param file        output file; if {@code null}, nothing is written
 * @param commandLine command line to record in the file's header comments
 * @throws UserException.CouldNotCreateOutputFile if the file cannot be written
 */
private void writeTangentBetaHats(final File file, final String commandLine) {
    if (file == null) {
        return;
    }
    final List<String> countColumnNames = targetFactorNormalizedCounts.columnNames();
    final List<String> columnNames = new ArrayList<>(countColumnNames.size() + 1);
    columnNames.add(PON_SAMPLE_BETA_HAT_COLUMN_NAME);
    columnNames.addAll(countColumnNames);
    final TableColumnCollection columns = new TableColumnCollection(columnNames);
    try (final TableWriter<Integer> writer = TableUtils.writer(file, columns,
            (i, dataLine) -> dataLine.append(Integer.toString(i)).append(tangentBetaHats.getRow(i)))) {
        writer.writeComment("fileFormat = tsv");
        writer.writeComment("commandLine = " + commandLine);
        writer.writeComment("title = Tangent normalization Beta Hats");
        for (int i = 0; i < tangentBetaHats.getRowDimension(); i++) {
            writer.writeRecord(i);
        }
    } catch (final IOException ex) {
        // Pass the exception itself (not just its message) so the cause and stack trace are preserved.
        throw new UserException.CouldNotCreateOutputFile(file, ex);
    }
}
Use of org.broadinstitute.hellbender.utils.tsv.TableColumnCollection in the broadinstitute/gatk-protected project:
class SexGenotypeDataCollection, method write.
/**
 * Write the collection to a writer. If extended genotyping inference data is available for
 * every sample and the samples share at least one genotype, the shared genotypes will also be
 * written as optional columns (in sorted order, so the output is deterministic).
 *
 * @param dataWriter an instance of {@link Writer}
 * @throws IllegalStateException if the collection is empty
 * @throws UserException.CouldNotCreateOutputFile if writing fails
 */
public void write(@Nonnull final Writer dataWriter) {
    if (sexGenotypeDataList.isEmpty()) {
        throw new IllegalStateException("The sex genotype data collection is empty");
    }
    /* extended columns can only be written if every entry carries extended genotyping info */
    boolean extended = sexGenotypeDataList.stream().allMatch(SexGenotypeData::hasExtendedGenotypingInfo);
    Set<String> commonGenotypes = new HashSet<>();
    /* check if there is a non-empty intersection across all samples */
    if (extended) {
        commonGenotypes = new HashSet<>(sexGenotypeDataList.get(0).getSexGenotypesSet());
        for (final SexGenotypeData dat : sexGenotypeDataList) {
            /* materialize the intersection in place rather than nesting Sets.intersection views */
            commonGenotypes.retainAll(dat.getSexGenotypesSet());
        }
        if (commonGenotypes.isEmpty()) {
            extended = false;
        }
    }
    final TableColumnCollection columns;
    if (extended) {
        final List<String> columnNames = new ArrayList<>();
        columnNames.addAll(SexGenotypeTableColumn.MANDATORY_SEX_GENOTYPE_COLUMNS.names());
        /* sort the genotype columns: HashSet iteration order is unspecified, and a stable
         * column order keeps repeated runs byte-identical */
        columnNames.addAll(new TreeSet<>(commonGenotypes));
        columns = new TableColumnCollection(columnNames);
    } else {
        columns = new TableColumnCollection(SexGenotypeTableColumn.MANDATORY_SEX_GENOTYPE_COLUMNS.names());
    }
    try (final SexGenotypeTableWriter writer = new SexGenotypeTableWriter(dataWriter, columns)) {
        writer.writeAllRecords(sexGenotypeDataList);
    } catch (final IOException e) {
        throw new UserException.CouldNotCreateOutputFile("Could not write sex genotype data", e);
    }
}
Aggregations