Use of htsjdk.variant.vcf.VCFCodec in the project gatk by broadinstitute.
From the class GenomicsDBImportIntegrationTest, method checkGenomicsDBAgainstExpected.
/**
 * Asserts that the variants read back from a GenomicsDB workspace match those in an
 * expected combined VCF over the given interval.
 *
 * @param workspace           path to the GenomicsDB workspace directory
 * @param interval            genomic interval to query from both sources
 * @param expectedCombinedVCF path to the VCF file holding the expected combined records
 * @throws IOException if either source cannot be opened or read
 */
private static void checkGenomicsDBAgainstExpected(final String workspace, final SimpleInterval interval, final String expectedCombinedVCF) throws IOException {
    // Both readers are Closeable; the original leaked them by only closing the query
    // iterators. Open everything in one try-with-resources so readers and iterators
    // are all released, even when an assertion fails.
    try (final GenomicsDBFeatureReader<VariantContext, PositionalBufferedStream> genomicsDBFeatureReader =
                 new GenomicsDBFeatureReader<>(
                         new File(workspace, GenomicsDBConstants.DEFAULT_VIDMAP_FILE_NAME).getAbsolutePath(),
                         new File(workspace, GenomicsDBConstants.DEFAULT_CALLSETMAP_FILE_NAME).getAbsolutePath(),
                         workspace,
                         GenomicsDBConstants.DEFAULT_ARRAY_NAME,
                         b38_reference_20_21,
                         null,
                         new BCF2Codec());
         final AbstractFeatureReader<VariantContext, LineIterator> combinedVCFReader =
                 AbstractFeatureReader.getFeatureReader(expectedCombinedVCF, new VCFCodec(), true);
         final CloseableTribbleIterator<VariantContext> actualVcs =
                 genomicsDBFeatureReader.query(interval.getContig(), interval.getStart(), interval.getEnd());
         final CloseableTribbleIterator<VariantContext> expectedVcs =
                 combinedVCFReader.query(interval.getContig(), interval.getStart(), interval.getEnd())) {
        BaseTest.assertCondition(actualVcs, expectedVcs, (a, e) -> {
            // TODO: Temporary hacks to make this test pass. Must be removed later
            if (// allele order
            e.getStart() != 17967343 && e.getStart() != 17966384 && // split block
            e.getEnd() != 17981447) {
                VariantContextTestUtils.assertVariantContextsAreEqual(a, e, Collections.emptyList());
            }
        });
    }
}
Use of htsjdk.variant.vcf.VCFCodec in the project gatk-protected by broadinstitute.
From the class GenotypeGVCFsIntegrationTest, method getVariantContexts.
/**
 * Returns a list of VariantContext records from a VCF file.
 *
 * @param vcfFile VCF file
 * @return list of VariantContext records
 * @throws IOException if the file does not exist or can not be opened
 */
private static List<VariantContext> getVariantContexts(final File vcfFile) throws IOException {
    final VCFCodec codec = new VCFCodec();
    final List<VariantContext> variantContexts = new ArrayList<>();
    // The original leaked the FileInputStream; close it deterministically.
    try (final FileInputStream inputStream = new FileInputStream(vcfFile)) {
        final LineIterator lineIteratorVCF = codec.makeSourceFromStream(new PositionalBufferedStream(inputStream));
        // The header must be consumed so the codec can decode the data lines that follow.
        codec.readHeader(lineIteratorVCF);
        while (lineIteratorVCF.hasNext()) {
            final String line = lineIteratorVCF.next();
            // Idiomatic null assertion (was Assert.assertFalse(line == null)).
            Assert.assertNotNull(line);
            variantContexts.add(codec.decode(line));
        }
    }
    return variantContexts;
}
Use of htsjdk.variant.vcf.VCFCodec in the project hmftools by hartwigmedical.
From the class SomaticVariantFactoryTest, method createTestCodec.
/**
 * Builds a VCFCodec for tests, pre-configured with a minimal VCF 4.2 header
 * that declares only the test sample.
 *
 * @return a codec ready to decode VCF 4.2 lines for {@code SAMPLE}
 */
@NotNull
private static VCFCodec createTestCodec() {
    final VCFHeader minimalHeader = new VCFHeader(Sets.newHashSet(), Sets.newHashSet(SAMPLE));
    final VCFCodec testCodec = new VCFCodec();
    testCodec.setVCFHeader(minimalHeader, VCFHeaderVersion.VCF4_2);
    return testCodec;
}
Use of htsjdk.variant.vcf.VCFCodec in the project hmftools by hartwigmedical.
From the class BlacklistPredicateTest, method setup.
/**
 * Test fixture setup: loads the blacklist program from XML, builds the predicate
 * under test, and prepares a VCF 4.2 codec with a single-sample header.
 *
 * @throws SAXException if the blacklist XML cannot be parsed
 */
@Before
public void setup() throws SAXException {
    final BachelorSchema bachelorSchema = BachelorSchema.make();
    final Program parsedProgram = bachelorSchema.processXML(Paths.get(BLACKLIST_XML));
    assertNotNull(parsedProgram);
    blacklistPredicate = new BlacklistPredicate(Sets.newHashSet("ENST00000380152"), parsedProgram.getBlacklist());
    // Minimal header declaring only the test sample; enough for the codec to decode records.
    final VCFHeader singleSampleHeader = new VCFHeader(Sets.newHashSet(), Sets.newHashSet(SAMPLE));
    codec = new VCFCodec();
    codec.setVCFHeader(singleSampleHeader, VCFHeaderVersion.VCF4_2);
}
Use of htsjdk.variant.vcf.VCFCodec in the project jvarkit by lindenb.
From the class VCFComposite, method doWork.
/**
 * Main entry point: streams a VCF (stdin or the single file argument), groups variant
 * lines by gene key within each contig via an external sorting collection, and hands
 * each gene's variants to the configured disease model for compound-heterozygosity
 * scanning. With {@code listModels}, just prints the available model names and exits.
 *
 * @param args zero or one input URI; empty means read from stdin
 * @return 0 on success, -1 on error (bad pedigree, parse failure, etc.)
 */
@Override
public int doWork(final List<String> args) {
PrintWriter out = null;
try {
out = super.openFileOrStdoutAsPrintWriter(this.outputFile);
// --list mode: dump the model catalogue and stop.
if (listModels) {
for (final Type t : Type.values()) {
out.println(t.name());
out.println("\t" + t.getDescription());
}
out.flush();
return 0;
}
// The model needs at least one affected and one unaffected sample.
this.pedigree = Pedigree.newParser().parse(pedigreeFile);
if (this.pedigree.getAffected().isEmpty()) {
LOG.error("No Affected sample in " + this.pedigreeFile);
return -1;
}
if (this.pedigree.getUnaffected().isEmpty()) {
LOG.error("No Unaffected sample in " + this.pedigreeFile);
return -1;
}
final DiseaseModel model = this.createModel();
final String inputName = super.oneFileOrNull(args);
final LineIterator r = (inputName == null ? IOUtils.openStreamForLineIterator(stdin()) : IOUtils.openURIForLineIterator(inputName));
final VCFCodec codec = new VCFCodec();
final VCFHeader header = (VCFHeader) codec.readActualHeader(r);
// Both SnpEff (ANN) and VEP annotations are consulted for gene keys.
final AnnPredictionParser annParser = new AnnPredictionParserFactory(header).get();
final VepPredictionParser vepParser = new VepPredictionParserFactory(header).get();
// final VCFHeader h2=new VCFHeader(header.getMetaDataInInputOrder(),header.getSampleNamesInOrder());
// h2.addMetaDataLine(new VCFInfoHeaderLine(this.TAG,1,VCFHeaderLineType.String,"Values from bigwig file: "+BIGWIG));
// Lazily created per contig; holds (geneKey, rawVcfLine) pairs sorted by gene.
SortingCollection<GeneAndVariant> sorting = null;
String prevContig = null;
for (; ; ) {
String line;
final VariantContext ctx;
// Read the next record; a null ctx acts as the end-of-input sentinel so the
// final contig's buffered variants are still flushed below.
if (r.hasNext()) {
line = r.next();
ctx = codec.decode(line);
} else {
line = null;
ctx = null;
}
// Contig changed (or input ended): flush everything buffered for prevContig.
if (ctx == null || !ctx.getContig().equals(prevContig)) {
if (sorting != null) {
LOG.debug("Dump contig " + prevContig);
sorting.doneAdding();
CloseableIterator<GeneAndVariant> iter2 = sorting.iterator();
// EqualRangeIterator yields runs of entries sharing the same gene key,
// so each model.scan call sees all variants of one gene at once.
EqualRangeIterator<GeneAndVariant> eqiter = new EqualRangeIterator<>(iter2, (A, B) -> A.gene.compareTo(B.gene));
while (eqiter.hasNext()) {
final List<GeneAndVariant> variants = eqiter.next();
// Variants were stored as raw lines; re-decode them for the model.
model.scan(variants.get(0).gene, variants.stream().map(L -> codec.decode(L.ctxLine)).collect(Collectors.toList()), out);
}
eqiter.close();
iter2.close();
sorting.cleanup();
}
sorting = null;
if (ctx == null)
break;
prevContig = ctx.getContig();
}
// Per-record filters: skip non-variant, filtered, ID'd, or model-rejected records.
if (!ctx.isVariant())
continue;
if (!acceptFiltered && ctx.isFiltered())
continue;
if (!acceptID && ctx.hasID())
continue;
if (!model.accept(ctx))
continue;
// Gene keys are prefixed with the contig so identical gene names on
// different contigs never collide.
final Set<String> geneKeys = new HashSet<>();
for (final AnnPredictionParser.AnnPrediction pred : annParser.getPredictions(ctx)) {
geneKeys.addAll(pred.getGeneKeys().stream().map(S -> ctx.getContig() + "_" + S).collect(Collectors.toSet()));
}
for (final VepPredictionParser.VepPrediction pred : vepParser.getPredictions(ctx)) {
geneKeys.addAll(pred.getGeneKeys().stream().map(S -> ctx.getContig() + "_" + S).collect(Collectors.toSet()));
}
if (sorting == null) {
// Order by gene first, then by the raw line, so equal-gene runs are contiguous.
sorting = SortingCollection.newInstance(GeneAndVariant.class, new GeneAndVariantCodec(), (A, B) -> {
int i = A.gene.compareTo(B.gene);
if (i != 0)
return i;
return A.ctxLine.compareTo(B.ctxLine);
}, this.writingSortingCollection.getMaxRecordsInRam(), this.writingSortingCollection.getTmpPaths());
sorting.setDestructiveIteration(true);
}
// One entry per (gene, variant) pair; the same line may appear under several genes.
for (final String gk : geneKeys) {
final GeneAndVariant gav = new GeneAndVariant();
gav.gene = gk;
gav.ctxLine = line;
sorting.add(gav);
}
}
out.flush();
out.close();
out = null;
return 0;
} catch (Exception err) {
LOG.error(err);
return -1;
} finally {
// Closes out only if we failed before the normal close above.
CloserUtil.close(out);
}
}
Aggregations