Use of htsjdk.variant.variantcontext.writer.VariantContextWriter in project jvarkit by lindenb.
The class VCFComm, method doWork:
@Override
public int doWork(final List<String> args) {
CloseableIterator<LineAndFile> iter = null;
SortingCollection<LineAndFile> variants = null;
VariantContextWriter w = null;
try {
if (args.isEmpty()) {
LOG.error("Illegal number of arguments");
return -1;
}
Set<VCFHeaderLine> metaData = new HashSet<VCFHeaderLine>();
variants = SortingCollection.newInstance(LineAndFile.class, new LineAndFileCodec(), new LineAndFileComparator(), super.sortingCollectionArgs.getMaxRecordsInRam(), super.sortingCollectionArgs.getTmpPaths());
variants.setDestructiveIteration(true);
/**
* new sample names in the output vcf: one sample per file
*/
final Map<Integer, String> fileid2sampleName = new TreeMap<>();
/**
* sample names as they appear in the original VCF headers
*/
final Counter<String> countInputSamples = new Counter<String>();
/**
* dicts
*/
final List<SAMSequenceDictionary> all_dictionaries = new ArrayList<>();
for (final String vcffilename : IOUtils.unrollFiles(args)) {
LOG.info("Reading from " + vcffilename);
final Input input = super.put(variants, vcffilename);
String sampleName = vcffilename;
if (sampleName.endsWith(".vcf.gz")) {
sampleName = sampleName.substring(0, sampleName.length() - 7);
} else if (sampleName.endsWith(".vcf")) {
sampleName = sampleName.substring(0, sampleName.length() - 4);
}
int slash = sampleName.lastIndexOf(File.separatorChar);
if (slash != -1)
sampleName = sampleName.substring(slash + 1);
int suffix = 1;
// loop until we find a unique name
for (; ; ) {
final String key = sampleName + (suffix == 1 ? "" : "_" + suffix);
if (fileid2sampleName.containsValue(key)) {
suffix++;
continue;
}
fileid2sampleName.put(input.file_id, key);
metaData.add(new VCFHeaderLine(key, vcffilename));
break;
}
for (final String sname : input.codecAndHeader.header.getSampleNamesInOrder()) {
countInputSamples.incr(sname);
}
all_dictionaries.add(input.codecAndHeader.header.getSequenceDictionary());
}
variants.doneAdding();
/**
* unique sample name, if the same single sample is present in every input VCF
*/
Optional<String> unqueSampleName = Optional.empty();
if (countInputSamples.getCountCategories() == 1 && countInputSamples.count(countInputSamples.keySet().iterator().next()) == fileid2sampleName.size()) {
unqueSampleName = Optional.of(countInputSamples.keySet().iterator().next());
LOG.info("Unique sample name is " + unqueSampleName.get());
}
VCFStandardHeaderLines.addStandardFormatLines(metaData, true, VCFConstants.DEPTH_KEY, VCFConstants.GENOTYPE_QUALITY_KEY, VCFConstants.GENOTYPE_KEY, VCFConstants.GENOTYPE_FILTER_KEY);
VCFStandardHeaderLines.addStandardInfoLines(metaData, true, VCFConstants.DEPTH_KEY, VCFConstants.ALLELE_COUNT_KEY, VCFConstants.ALLELE_NUMBER_KEY);
metaData.add(new VCFHeaderLine(getClass().getSimpleName(), "version:" + getVersion() + " command:" + getProgramCommandLine()));
final VCFFilterHeaderLine variantNotCalledInAllVcf = new VCFFilterHeaderLine("NotCalledEveryWhere", "Variant was NOT called in all input VCFs");
metaData.add(variantNotCalledInAllVcf);
final VCFFilterHeaderLine variantWasFiltered = new VCFFilterHeaderLine("VariantWasFiltered", "At least one variant was filtered");
metaData.add(variantWasFiltered);
final VCFFormatHeaderLine variantQUALFormat = new VCFFormatHeaderLine("VCQUAL", 1, VCFHeaderLineType.Float, "Variant Quality");
metaData.add(variantQUALFormat);
metaData.add(new VCFFormatHeaderLine(VCFConstants.ALLELE_NUMBER_KEY, 1, VCFHeaderLineType.Integer, "Number of alleles in the src vcf"));
metaData.add(new VCFFormatHeaderLine(VCFConstants.ALLELE_COUNT_KEY, 1, VCFHeaderLineType.Integer, "Number of ALT alleles"));
final VCFInfoHeaderLine foundInCountVcfInfo = new VCFInfoHeaderLine("NVCF", 1, VCFHeaderLineType.Integer, "Number of VCFs in which this variant was found");
metaData.add(foundInCountVcfInfo);
final VCFInfoHeaderLine variantTypesInfo = new VCFInfoHeaderLine("VTYPES", VCFHeaderLineCount.UNBOUNDED, VCFHeaderLineType.String, "Distinct Variants type");
metaData.add(variantTypesInfo);
final VCFFilterHeaderLine multipleTypeFilters = new VCFFilterHeaderLine("DiscordantTypes", "Discordant types at this position");
metaData.add(multipleTypeFilters);
final VCFFormatHeaderLine variantTypeFormat = new VCFFormatHeaderLine("VTYPE", 1, VCFHeaderLineType.String, "Variant Type");
metaData.add(variantTypeFormat);
final VCFFilterHeaderLine uniqueVariantDiscordantGTFilter;
if (unqueSampleName.isPresent()) {
metaData.add(new VCFHeaderLine("UniqSample", unqueSampleName.get()));
uniqueVariantDiscordantGTFilter = new VCFFilterHeaderLine("DiscordantGenotypeForUniqSample", "Discordant genotype for sample " + unqueSampleName.get());
metaData.add(uniqueVariantDiscordantGTFilter);
} else {
uniqueVariantDiscordantGTFilter = null;
}
final VCFHeader header = new VCFHeader(metaData, new ArrayList<>(fileid2sampleName.values()));
// all inputs have a dict
if (!normalize_chr && !all_dictionaries.contains(null)) {
SAMSequenceDictionary thedict = null;
for (int x = 0; x < all_dictionaries.size(); ++x) {
SAMSequenceDictionary d = all_dictionaries.get(x);
if (thedict == null) {
thedict = d;
} else if (!SequenceUtil.areSequenceDictionariesEqual(d, thedict)) {
thedict = null;
break;
}
}
if (thedict != null)
header.setSequenceDictionary(thedict);
}
w = super.openVariantContextWriter(super.outputFile);
w.writeHeader(header);
final List<LineAndFile> row = new ArrayList<LineAndFile>(super.inputs.size());
final Comparator<LineAndFile> posCompare = (A, B) -> A.getContigPosRef().compareTo(B.getContigPosRef());
iter = variants.iterator();
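// sweep the position-sorted stream, buffering all records that share the same contig/pos/ref into 'row'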
for (; ; ) {
LineAndFile rec = null;
if (iter.hasNext()) {
rec = iter.next();
}
if (rec == null || (!row.isEmpty() && posCompare.compare(row.get(0), rec) != 0)) {
if (!row.isEmpty()) {
final VariantContext first = row.get(0).getContext();
/* in which file id we find this variant */
Set<Integer> fileids_for_variant = row.stream().map(LAF -> LAF.fileIdx).collect(Collectors.toSet());
// handle files that have multiple records with the same chrom/pos/ref but different alt
if (row.size() != fileids_for_variant.size()) {
for (; ; ) {
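// compare every pair of records coming from the same file; drop the less informative duplicate, then restart the scan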
boolean ok = true;
for (int x = 0; ok && x + 1 < row.size(); ++x) {
final VariantContext ctxx = row.get(x).getContext();
final List<Allele> altsx = ctxx.getAlternateAlleles();
for (int y = x + 1; ok && y < row.size(); ++y) {
if (row.get(x).fileIdx != row.get(y).fileIdx)
continue;
final VariantContext ctxy = row.get(y).getContext();
final List<Allele> altsy = ctxy.getAlternateAlleles();
if (altsx.equals(altsy))
continue;
if (!ctxx.isVariant() && ctxy.isVariant()) {
row.remove(x);
} else if (ctxx.isVariant() && !ctxy.isVariant()) {
row.remove(y);
} else if (!ctxx.isSNP() && ctxy.isSNP()) {
row.remove(x);
} else if (ctxx.isSNP() && !ctxy.isSNP()) {
row.remove(y);
} else if (altsx.size() > altsy.size()) {
row.remove(x);
} else if (altsx.size() < altsy.size()) {
row.remove(y);
} else {
row.remove(y);
}
ok = false;
break;
}
}
if (ok)
break;
}
fileids_for_variant = row.stream().map(LAF -> LAF.fileIdx).collect(Collectors.toSet());
}
if (row.size() != fileids_for_variant.size()) {
LOG.error("There are some duplicated variants at the position " + new ContigPosRef(first) + " in the same vcf file");
for (final LineAndFile laf : row) {
LOG.error("File [" + laf.fileIdx + "]" + fileid2sampleName.get(laf.fileIdx));
LOG.error("\t" + laf.getContigPosRef());
}
row.clear();
} else {
final Set<Allele> alleles = row.stream().flatMap(R -> R.getContext().getAlleles().stream()).collect(Collectors.toSet());
final VariantContextBuilder vcb = new VariantContextBuilder(getClass().getName(), first.getContig(), first.getStart(), first.getEnd(), alleles);
final Set<String> filters = new HashSet<>();
final Set<VariantContext.Type> variantContextTypes = new HashSet<>();
final List<Genotype> genotypes = new ArrayList<Genotype>();
for (final LineAndFile laf : row) {
if (laf.getContext().isFiltered())
filters.add(variantWasFiltered.getID());
variantContextTypes.add(laf.getContext().getType());
final GenotypeBuilder gbuilder = new GenotypeBuilder();
gbuilder.name(fileid2sampleName.get(laf.fileIdx));
if (unqueSampleName.isPresent()) {
final Genotype g0 = laf.getContext().getGenotype(unqueSampleName.get());
if (g0 == null) {
iter.close();
w.close();
throw new IllegalStateException("Cannot find genotype for " + unqueSampleName.get());
}
if (g0.hasDP())
gbuilder.DP(g0.getDP());
if (g0.hasGQ())
gbuilder.GQ(g0.getGQ());
gbuilder.alleles(g0.getAlleles());
} else {
gbuilder.alleles(Arrays.asList(first.getReference(), first.getReference()));
if (laf.getContext().hasAttribute(VCFConstants.DEPTH_KEY)) {
gbuilder.DP(laf.getContext().getAttributeAsInt(VCFConstants.DEPTH_KEY, 0));
}
}
if (laf.getContext().isFiltered()) {
gbuilder.filter("VCFFILTERED");
}
if (laf.getContext().hasLog10PError()) {
gbuilder.attribute(variantQUALFormat.getID(), laf.getContext().getPhredScaledQual());
}
gbuilder.attribute(VCFConstants.ALLELE_NUMBER_KEY, laf.getContext().getGenotypes().stream().flatMap(G -> G.getAlleles().stream()).filter(A -> !A.isNoCall()).count());
gbuilder.attribute(VCFConstants.ALLELE_COUNT_KEY, laf.getContext().getGenotypes().stream().flatMap(G -> G.getAlleles().stream()).filter(A -> !(A.isReference() || A.isNoCall())).count());
gbuilder.attribute(variantTypeFormat.getID(), laf.getContext().getType().name());
genotypes.add(gbuilder.make());
}
final String id = String.join(";", row.stream().map(LAF -> LAF.getContext()).filter(V -> V.hasID()).map(V -> V.getID()).collect(Collectors.toSet()));
if (!id.isEmpty())
vcb.id(id);
vcb.genotypes(genotypes);
if (unqueSampleName.isPresent()) {
boolean all_same = true;
for (int x = 0; all_same && x + 1 < genotypes.size(); ++x) {
if (!genotypes.get(x).isCalled())
continue;
for (int y = x + 1; all_same && y < genotypes.size(); ++y) {
if (!genotypes.get(y).isCalled())
continue;
if (!genotypes.get(x).sameGenotype(genotypes.get(y), true)) {
all_same = false;
break;
}
}
}
if (!all_same)
filters.add(uniqueVariantDiscordantGTFilter.getID());
}
// Add AN
vcb.attribute(VCFConstants.ALLELE_NUMBER_KEY, genotypes.stream().filter(G -> G.isCalled()).mapToInt(G -> G.getAlleles().size()).sum());
if (!variantContextTypes.isEmpty()) {
vcb.attribute(variantTypesInfo.getID(), new ArrayList<>(variantContextTypes.stream().map(T -> T.name()).collect(Collectors.toSet())));
if (variantContextTypes.size() > 1) {
filters.add(multipleTypeFilters.getID());
}
}
vcb.attribute(foundInCountVcfInfo.getID(), fileids_for_variant.size());
boolean print = true;
if (row.size() == super.inputs.size() && ignore_everywhere) {
print = false;
}
if (fileids_for_variant.size() != fileid2sampleName.size()) {
filters.add(variantNotCalledInAllVcf.getID());
if (only_everywhere) {
print = false;
}
}
vcb.filters(filters);
if (print) {
w.add(vcb.make());
}
}
row.clear();
}
if (rec == null)
break;
}
row.add(rec);
}
iter.close();
iter = null;
w.close();
w = null;
return 0;
} catch (Exception err) {
LOG.error(err);
return -1;
} finally {
CloserUtil.close(iter);
CloserUtil.close(w);
try {
if (variants != null)
variants.cleanup();
} catch (Exception err) {
}
}
}
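All five snippets on this page share the same VariantContextWriter lifecycle: open the writer, call writeHeader() exactly once, add() each variant, then close(). The jvarkit helper super.openVariantContextWriter(...) hides the construction step; a minimal plain-htsjdk sketch of an equivalent setup (the class and method names below are illustrative, not jvarkit's) could look like this:

import java.io.File;
import htsjdk.variant.variantcontext.VariantContext;
import htsjdk.variant.variantcontext.writer.Options;
import htsjdk.variant.variantcontext.writer.VariantContextWriter;
import htsjdk.variant.variantcontext.writer.VariantContextWriterBuilder;
import htsjdk.variant.vcf.VCFHeader;

public class WriterLifecycleSketch {
    /** open a writer, emit the header once, stream the variants, always close */
    static void writeVcf(final File out, final VCFHeader header, final Iterable<VariantContext> variants) {
        final VariantContextWriterBuilder builder = new VariantContextWriterBuilder().setOutputFile(out);
        if (header.getSequenceDictionary() == null) {
            // index-on-the-fly is among the builder's default options but requires a dictionary
            builder.unsetOption(Options.INDEX_ON_THE_FLY);
        } else {
            builder.setReferenceDictionary(header.getSequenceDictionary());
        }
        VariantContextWriter w = null;
        try {
            w = builder.build();
            w.writeHeader(header); // must be called before any add()
            for (final VariantContext ctx : variants) {
                w.add(ctx);
            }
        } finally {
            if (w != null) w.close(); // close() also finishes any on-the-fly index
        }
    }
}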
Use of htsjdk.variant.variantcontext.writer.VariantContextWriter in project jvarkit by lindenb.
The class VcfCompareCallersOneSample, method doWork:
@Override
public int doWork(List<String> args) {
File inputFile = null;
List<EqualRangeVcfIterator> listChallengers = new ArrayList<>();
VariantContextWriter vcw = null;
VcfIterator in = null;
try {
final String inputName = oneFileOrNull(args);
inputFile = (inputName == null ? null : new File(inputName));
in = super.openVcfIterator(inputName);
VCFHeader header = in.getHeader();
if (header.getNGenotypeSamples() != 1) {
LOG.error("vcf.must.have.only.one.sample");
return -1;
}
VCFHeader h2 = new VCFHeader(header);
h2.addMetaDataLine(new VCFHeaderLine(getClass().getSimpleName() + "CmdLine", String.valueOf(getProgramCommandLine())));
h2.addMetaDataLine(new VCFHeaderLine(getClass().getSimpleName() + "Version", String.valueOf(getVersion())));
h2.addMetaDataLine(new VCFHeaderLine(getClass().getSimpleName() + "HtsJdkVersion", HtsjdkVersion.getVersion()));
h2.addMetaDataLine(new VCFHeaderLine(getClass().getSimpleName() + "HtsJdkHome", HtsjdkVersion.getHome()));
SAMSequenceDictionary dict = header.getSequenceDictionary();
if (dict == null) {
LOG.error("no.dict.in.vcf");
return -1;
}
Comparator<VariantContext> ctxComparator = VCFUtils.createTidPosComparator(dict);
/* load files to be challenged */
for (File cf : this.challengerVcf) {
// do not challenge vs itself
if (inputFile != null && inputFile.equals(cf)) {
LOG.error("Ignoring challenger (self): " + cf);
continue;
}
VcfIterator cin = VCFUtils.createVcfIteratorFromFile(cf);
VCFHeader ch = cin.getHeader();
if (ch.getNGenotypeSamples() != 1) {
LOG.warning("vcf.must.have.only.one.sample");
cin.close();
continue;
}
if (!header.getSampleNamesInOrder().get(0).equals(ch.getSampleNamesInOrder().get(0))) {
LOG.warning("Ignoring " + cf + " because not the same sample.");
cin.close();
continue;
}
SAMSequenceDictionary hdict = ch.getSequenceDictionary();
if (hdict == null || !SequenceUtil.areSequenceDictionariesEqual(dict, hdict)) {
LOG.error("not.the.same.sequence.dictionaries");
return -1;
}
listChallengers.add(new EqualRangeVcfIterator(cin, ctxComparator));
}
vcw = super.openVariantContextWriter(outputFile);
vcw.writeHeader(h2);
SAMSequenceDictionaryProgress progress = new SAMSequenceDictionaryProgress(dict);
VariantContext prev_ctx = null;
while (in.hasNext() && !vcw.checkError()) {
VariantContext ctx = progress.watch(in.next());
// check input order
if (prev_ctx != null && ctxComparator.compare(prev_ctx, ctx) > 0) {
LOG.error("bad sort order : got\n\t" + prev_ctx + "\nbefore\n\t" + ctx + "\n");
return -1;
}
prev_ctx = ctx;
int countInOtherFiles = 0;
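// count how many challenger VCFs contain a matching variant at this position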
for (EqualRangeVcfIterator citer : listChallengers) {
boolean foundInThatFile = false;
List<VariantContext> ctxChallenging = citer.next(ctx);
for (VariantContext ctx2 : ctxChallenging) {
if (!ctx2.getReference().equals(ctx.getReference()))
continue;
boolean ok = true;
if (!this.ignoreAlternate) {
Set<Allele> myAlt = new HashSet<Allele>(ctx.getAlternateAlleles());
myAlt.removeAll(ctx2.getAlternateAlleles());
if (!myAlt.isEmpty())
ok = false;
}
if (ok) {
foundInThatFile = true;
break;
}
}
countInOtherFiles += (foundInThatFile ? 1 : 0);
}
if (countInOtherFiles >= minCountInclusive && countInOtherFiles <= maxCountInclusive) {
vcw.add(ctx);
}
}
progress.finish();
return 0;
} catch (Exception err) {
LOG.error(err);
return -1;
} finally {
CloserUtil.close(vcw);
CloserUtil.close(listChallengers);
CloserUtil.close(in);
}
}
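VCFUtils.createTidPosComparator(dict) is a jvarkit helper; its contract is ordering variants by sequence-dictionary index first, then by start position, the same ordering used above both to detect bad sort order and to drive EqualRangeVcfIterator. A minimal equivalent sketch, assuming every contig appears in the dictionary (the class and method names are illustrative):

import java.util.Comparator;
import htsjdk.samtools.SAMSequenceDictionary;
import htsjdk.variant.variantcontext.VariantContext;

public class TidPosComparatorSketch {
    /** order variants by contig index in the dictionary, then by start position */
    static Comparator<VariantContext> tidPosComparator(final SAMSequenceDictionary dict) {
        return (a, b) -> {
            final int tidA = dict.getSequenceIndex(a.getContig()); // -1 if the contig is unknown
            final int tidB = dict.getSequenceIndex(b.getContig());
            if (tidA != tidB)
                return Integer.compare(tidA, tidB);
            return Integer.compare(a.getStart(), b.getStart());
        };
    }
}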
Use of htsjdk.variant.variantcontext.writer.VariantContextWriter in project jvarkit by lindenb.
The class VcfIn, method doWork:
@Override
public int doWork(final List<String> args) {
if (!this.filterIn.isEmpty() && !this.filterOut.isEmpty()) {
LOG.error("Option filterIn/filterOut both defined.");
return -1;
}
if (this.inverse && (!this.filterIn.isEmpty() || !this.filterOut.isEmpty())) {
LOG.error("Option inverse cannot be used when Option filterin/filterou is defined.");
return -1;
}
String databaseVcfUri;
String userVcfUri;
if (args.size() == 1) {
databaseVcfUri = args.get(0);
userVcfUri = null;
} else if (args.size() == 2) {
databaseVcfUri = args.get(0);
userVcfUri = args.get(1);
} else {
LOG.error("illegal number of arguments");
return -1;
}
VariantContextWriter w = null;
VcfIterator in = null;
try {
in = (userVcfUri == null ? VCFUtils.createVcfIteratorFromInputStream(stdin()) : VCFUtils.createVcfIterator(userVcfUri));
w = super.openVariantContextWriter(outputFile);
if (this.databaseIsIndexed) {
return this.scanUsingTabix(w, databaseVcfUri, in);
} else {
return this.scanFileSorted(w, databaseVcfUri, in);
}
} catch (final Exception err) {
LOG.error(err);
return -1;
} finally {
CloserUtil.close(in);
CloserUtil.close(w);
}
}
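scanUsingTabix and scanFileSorted are jvarkit-internal. The indexed path amounts to a tabix query against the database VCF for each user variant; here is a sketch using htsjdk's VCFFileReader, assuming databaseVcfUri points to a bgzipped, tabix-indexed file (the helper name foundInDatabase and the exact matching rule are assumptions for illustration):

import htsjdk.samtools.util.CloseableIterator;
import htsjdk.variant.variantcontext.VariantContext;
import htsjdk.variant.vcf.VCFFileReader;

public class TabixLookupSketch {
    /** true if the indexed database VCF contains a variant with the same start and reference allele */
    static boolean foundInDatabase(final VCFFileReader database, final VariantContext ctx) {
        final CloseableIterator<VariantContext> it = database.query(ctx.getContig(), ctx.getStart(), ctx.getEnd());
        try {
            while (it.hasNext()) {
                final VariantContext dbCtx = it.next();
                if (dbCtx.getStart() == ctx.getStart() && dbCtx.getReference().equals(ctx.getReference()))
                    return true;
            }
            return false;
        } finally {
            it.close();
        }
    }
}

The reader would be opened once, e.g. new VCFFileReader(new File(databaseVcfUri), true), and reused for every variant of the user VCF.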
Use of htsjdk.variant.variantcontext.writer.VariantContextWriter in project jvarkit by lindenb.
The class VcfLiftOver, method doVcfToVcf:
@Override
protected int doVcfToVcf(String inputName, VcfIterator in, VariantContextWriter out) {
VariantContextWriter failed = null;
GenomicSequence genomicSequence = null;
try {
final VCFHeader inputHeader = in.getHeader();
final Set<VCFHeaderLine> headerLines = inputHeader.getMetaDataInInputOrder().stream().filter(V -> {
if (!(V instanceof VCFInfoHeaderLine))
return true;
final VCFInfoHeaderLine vih = VCFInfoHeaderLine.class.cast(V);
if (removeInfo.contains(vih.getID()))
return false;
return true;
}).collect(Collectors.toSet());
if (this.failedFile != null) {
final VCFHeader header2 = new VCFHeader(headerLines, inputHeader.getSampleNamesInOrder());
header2.addMetaDataLine(new VCFHeaderLine(getClass().getSimpleName() + "CmdLine", String.valueOf(getProgramCommandLine())));
header2.addMetaDataLine(new VCFHeaderLine(getClass().getSimpleName() + "Version", String.valueOf(getVersion())));
header2.addMetaDataLine(new VCFInfoHeaderLine(this.failedinfoTag, 1, VCFHeaderLineType.String, "Why the liftOver failed."));
failed = super.openVariantContextWriter(failedFile);
failed.writeHeader(header2);
}
final VCFHeader header3 = new VCFHeader(headerLines, inputHeader.getSampleNamesInOrder());
header3.setSequenceDictionary(this.indexedFastaSequenceFile.getSequenceDictionary());
header3.addMetaDataLine(new VCFHeaderLine(getClass().getSimpleName() + "CmdLine", String.valueOf(getProgramCommandLine())));
header3.addMetaDataLine(new VCFHeaderLine(getClass().getSimpleName() + "Version", String.valueOf(getVersion())));
header3.addMetaDataLine(new VCFInfoHeaderLine(this.infoTag, 1, VCFHeaderLineType.String, "Chromosome|Position before liftOver."));
out.writeHeader(header3);
final SAMSequenceDictionaryProgress progress = new SAMSequenceDictionaryProgress(in.getHeader());
while (in.hasNext()) {
VariantContext ctx = progress.watch(in.next());
if (!this.removeInfo.isEmpty()) {
VariantContextBuilder vcb = new VariantContextBuilder(ctx);
for (final String tag : this.removeInfo) vcb.rmAttribute(tag);
ctx = vcb.make();
}
if (ctx.isIndel() && this.ignoreIndels) {
if (failed != null)
failed.add(new VariantContextBuilder(ctx).attribute(this.failedinfoTag, "Indel").make());
continue;
}
if (adaptivematch) {
double minAlleleLength = Math.min(0, ctx.getAlleles().stream().mapToInt(A -> A.length()).min().orElse(0));
double maxAlleleLength = Math.max(1, ctx.getAlleles().stream().mapToInt(A -> A.length()).max().orElse(1));
this.liftOver.setLiftOverMinMatch(minAlleleLength / maxAlleleLength);
}
// Interval(contig, start, end, negativeStrand=false, name)
final Interval lifted = liftOver.liftOver(new Interval(ctx.getContig(), ctx.getStart(), ctx.getEnd(), false, String.join("|", ctx.getContig(), String.valueOf(ctx.getStart()), ctx.getReference().toString())));
if (lifted == null) {
if (failed != null)
failed.add(new VariantContextBuilder(ctx).attribute(this.failedinfoTag, "LiftOverFailed").make());
} else if (this.indexedFastaSequenceFile.getSequenceDictionary().getSequence(lifted.getContig()) == null) {
if (failed != null)
failed.add(new VariantContextBuilder(ctx).attribute(this.failedinfoTag, "ContigMissingDictionary|" + lifted.getContig()).make());
} else {
boolean alleleAreValidatedVsRef = true;
// part of the code was copied from picard/liftovervcf
final Map<Allele, Allele> reverseComplementAlleleMap = new HashMap<>();
final List<Allele> alleles = new ArrayList<Allele>();
for (final Allele oldAllele : ctx.getAlleles()) {
final Allele fixedAllele;
if (oldAllele.isSymbolic() || oldAllele.isNoCall() || oldAllele.equals(Allele.SPAN_DEL)) {
alleles.add(oldAllele);
continue;
} else if (lifted.isPositiveStrand()) {
fixedAllele = oldAllele;
alleles.add(oldAllele);
} else {
fixedAllele = Allele.create(SequenceUtil.reverseComplement(oldAllele.getBaseString()), oldAllele.isReference());
alleles.add(fixedAllele);
reverseComplementAlleleMap.put(oldAllele, fixedAllele);
}
if (this.checkAlleleSequence) {
if (genomicSequence == null || !genomicSequence.getChrom().equals(lifted.getContig())) {
genomicSequence = new GenomicSequence(this.indexedFastaSequenceFile, lifted.getContig());
}
final String alleleStr = fixedAllele.getBaseString();
int x = 0;
while (x < alleleStr.length() && lifted.getStart() - 1 + x < genomicSequence.length()) {
final char refChar = genomicSequence.charAt(lifted.getStart() - 1 + x);
if (Character.toLowerCase(refChar) != Character.toLowerCase(alleleStr.charAt(x))) {
alleleAreValidatedVsRef = false;
break;
}
++x;
}
if (x != alleleStr.length()) {
alleleAreValidatedVsRef = false;
break;
}
}
}
if (!alleleAreValidatedVsRef) {
if (failed != null)
failed.add(new VariantContextBuilder(ctx).attribute(this.failedinfoTag, "AlleleMismatchRef").make());
continue;
}
if (lifted.getEnd() - lifted.getStart() != ctx.getEnd() - ctx.getStart()) {
if (failed != null)
failed.add(new VariantContextBuilder(ctx).attribute(this.failedinfoTag, "AlleleBadLength|" + lifted.length()).make());
continue;
}
final VariantContextBuilder vcb = new VariantContextBuilder(ctx.getSource(), lifted.getContig(), lifted.getStart(), lifted.getEnd(), alleles);
vcb.id(ctx.getID());
vcb.attributes(ctx.getAttributes());
vcb.attribute(this.infoTag, ctx.getContig() + "|" + ctx.getStart() + "|" + ctx.getReference().getDisplayString());
vcb.filters(ctx.getFilters());
vcb.log10PError(ctx.getLog10PError());
final GenotypesContext genotypeContext = ctx.getGenotypes();
final GenotypesContext fixedGenotypes = GenotypesContext.create(genotypeContext.size());
for (final Genotype genotype : genotypeContext) {
final List<Allele> fixedAlleles = new ArrayList<Allele>();
for (final Allele allele : genotype.getAlleles()) {
final Allele fixedAllele = reverseComplementAlleleMap.containsKey(allele) ? reverseComplementAlleleMap.get(allele) : allele;
fixedAlleles.add(fixedAllele);
}
fixedGenotypes.add(new GenotypeBuilder(genotype).alleles(fixedAlleles).make());
}
vcb.genotypes(fixedGenotypes);
out.add(vcb.make());
}
}
if (failed != null) {
failed.close();
failed = null;
}
return RETURN_OK;
} catch (final Exception err) {
LOG.error(err);
return -1;
} finally {
CloserUtil.close(failed);
}
}
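The liftOver field used above is htsjdk's htsjdk.samtools.liftover.LiftOver. A self-contained sketch of the calls the method depends on (the chain-file path and coordinates are made up for illustration):

import java.io.File;
import htsjdk.samtools.liftover.LiftOver;
import htsjdk.samtools.util.Interval;

public class LiftOverSketch {
    public static void main(final String[] args) {
        final LiftOver liftOver = new LiftOver(new File("hg19ToHg38.over.chain")); // hypothetical chain file
        // minimum fraction of bases that must remap; VcfLiftOver retunes this per variant when adaptivematch is set
        liftOver.setLiftOverMinMatch(0.95);
        final Interval src = new Interval("chr1", 1000000, 1000000, false, "example");
        final Interval lifted = liftOver.liftOver(src); // null when the interval cannot be mapped
        System.out.println(lifted == null ? "liftover failed" : lifted.getContig() + ":" + lifted.getStart());
    }
}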
Use of htsjdk.variant.variantcontext.writer.VariantContextWriter in project jvarkit by lindenb.
The class LumpyMoreSamples, method doWork:
@Override
public int doWork(final List<String> args) {
VcfIterator r = null;
VariantContextWriter vcw = null;
final Map<String, SamReader> sample2samreaders = new HashMap<>();
try {
r = super.openVcfIterator(oneFileOrNull(args));
final VCFHeader headerIn = r.getHeader();
final SAMSequenceDictionary dict = headerIn.getSequenceDictionary();
if (dict == null) {
LOG.error(JvarkitException.VcfDictionaryMissing.getMessage("input vcf"));
return -1;
}
final SamReaderFactory samReaderFactory = SamReaderFactory.makeDefault().validationStringency(ValidationStringency.LENIENT);
IOUtil.slurpLines(this.bamFileList).stream().forEach(F -> {
if (F.trim().isEmpty())
return;
final SamReader sr = samReaderFactory.open(SamInputResource.of(F));
final SAMFileHeader samHeader = sr.getFileHeader();
final SAMSequenceDictionary dict2 = samHeader.getSequenceDictionary();
if (dict2 == null) {
throw new JvarkitException.BamDictionaryMissing(F);
}
if (!SequenceUtil.areSequenceDictionariesEqual(dict, dict2)) {
throw new JvarkitException.DictionariesAreNotTheSame(dict, dict2);
}
for (final SAMReadGroupRecord rg : samHeader.getReadGroups()) {
final String sample = rg.getSample();
if (StringUtil.isBlank(sample))
continue;
final SamReader reader = sample2samreaders.get(sample);
if (reader == null) {
sample2samreaders.put(sample, sr);
} else if (reader == sr) {
continue;
} else {
throw new JvarkitException.UserError("sample " + sample + " is present in more than one bam: " + F);
}
}
});
final Set<String> inVcfSampleNames = new HashSet<>(headerIn.getSampleNamesInOrder());
final Set<String> outVcfSampleNames = new HashSet<>(inVcfSampleNames);
outVcfSampleNames.addAll(sample2samreaders.keySet());
final VCFHeader headerOut = new VCFHeader(headerIn.getMetaDataInInputOrder(), outVcfSampleNames);
final VCFFormatHeaderLine SU2 = new VCFFormatHeaderLine("SU2", 1, VCFHeaderLineType.Integer, "Number of pieces of evidence supporting the variant");
final VCFFormatHeaderLine PE2 = new VCFFormatHeaderLine("PE2", 1, VCFHeaderLineType.Integer, "Number of paired-end reads supporting the variant");
final VCFFormatHeaderLine SR2 = new VCFFormatHeaderLine("SR2", 1, VCFHeaderLineType.Integer, "Number of split reads supporting the variant");
headerOut.addMetaDataLine(SU2);
headerOut.addMetaDataLine(PE2);
headerOut.addMetaDataLine(SR2);
vcw = super.openVariantContextWriter(this.outputFile);
vcw.writeHeader(headerOut);
while (r.hasNext()) {
final VariantContext ctx = r.next();
final StructuralVariantType sttype = ctx.getStructuralVariantType();
if (sttype == null)
continue;
final int tid = dict.getSequenceIndex(ctx.getContig());
final Map<String, Genotype> genotypeMap = new HashMap<>();
ctx.getGenotypes().stream().forEach(G -> genotypeMap.put(G.getSampleName(), G));
for (final String sample : sample2samreaders.keySet()) {
final SamReader samReader = sample2samreaders.get(sample);
final SupportingReads sr = new SupportingReads();
switch(sttype) {
case DEL:
{
int pos = ctx.getStart();
int[] ci = confidenceIntervalPos(ctx);
final QueryInterval left = new QueryInterval(tid, pos - ci[0], pos + ci[1]);
int end = ctx.getEnd();
ci = confidenceIntervalEnd(ctx);
final QueryInterval right = new QueryInterval(tid, end - ci[0], end + ci[1]);
for (final SAMRecord rec : extractSupportingReads(ctx, sample, samReader, new QueryInterval[] { left, right })) {
final Cigar cigar = rec.getCigar();
if (cigar.isLeftClipped()) {
final QueryInterval qi = new QueryInterval(tid, rec.getUnclippedStart(), rec.getStart() - 1);
if (qi.overlaps(left)) {
sr.splitReads++;
if (rec.getReadPairedFlag())
sr.pairedReads++;
}
}
if (cigar.isRightClipped()) {
final QueryInterval qi = new QueryInterval(tid, rec.getEnd() + 1, rec.getUnclippedEnd());
if (qi.overlaps(right)) {
sr.splitReads++;
if (rec.getReadPairedFlag())
sr.pairedReads++;
}
}
}
break;
}
default:
break;
}
final GenotypeBuilder gb;
if (genotypeMap.containsKey(sample)) {
gb = new GenotypeBuilder(genotypeMap.get(sample));
} else {
gb = new GenotypeBuilder(sample, Arrays.asList(Allele.NO_CALL, Allele.NO_CALL));
}
gb.attribute(SR2.getID(), sr.splitReads);
gb.attribute(PE2.getID(), sr.pairedReads);
gb.attribute(SU2.getID(), 0);
genotypeMap.put(sample, gb.make());
}
final VariantContextBuilder vcb = new VariantContextBuilder(ctx);
// add missing samples.
for (final String sampleName : outVcfSampleNames) {
if (genotypeMap.containsKey(sampleName))
continue;
genotypeMap.put(sampleName, new GenotypeBuilder(sampleName, Arrays.asList(Allele.NO_CALL, Allele.NO_CALL)).make());
}
vcb.genotypes(genotypeMap.values());
vcw.add(vcb.make());
}
r.close();
r = null;
sample2samreaders.values().stream().forEach(R -> CloserUtil.close(R));
LOG.info("done");
return 0;
} catch (final Exception err) {
LOG.error(err);
return -1;
} finally {
CloserUtil.close(r);
}
}
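extractSupportingReads belongs to LumpyMoreSamples itself, but the underlying htsjdk pattern for pulling reads over the two breakpoint windows is a multi-interval BAM query. A sketch, assuming an indexed BAM (the helper name fetchReads is illustrative):

import java.util.ArrayList;
import java.util.List;
import htsjdk.samtools.QueryInterval;
import htsjdk.samtools.SAMRecord;
import htsjdk.samtools.SAMRecordIterator;
import htsjdk.samtools.SamReader;

public class QueryIntervalSketch {
    /** fetch all reads overlapping either breakpoint window of a deletion call */
    static List<SAMRecord> fetchReads(final SamReader samReader, final QueryInterval left, final QueryInterval right) {
        // query() requires its intervals to be sorted and merged beforehand
        final QueryInterval[] optimized = QueryInterval.optimizeIntervals(new QueryInterval[] { left, right });
        final List<SAMRecord> records = new ArrayList<>();
        final SAMRecordIterator it = samReader.query(optimized, false); // false = overlapping, not contained
        try {
            while (it.hasNext()) {
                records.add(it.next());
            }
        } finally {
            it.close();
        }
        return records;
    }
}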