use of htsjdk.variant.vcf.VCFHeader in project jvarkit by lindenb.
the class VcfStatsJfx method doWork.
@Override
protected int doWork(final Stage primaryStage, final List<String> args) {
try {
if (args.size() != 1) {
System.err.println("VCF missing");
return -1;
}
final VCFFileReader vcfFileReader = new VCFFileReader(new File(args.get(0)));
final VCFHeader header = vcfFileReader.getFileHeader();
chartGenerators.add(new VariantTypeGenerator());
if (!header.getFilterLines().isEmpty()) {
chartGenerators.add(new FilterUsageGenerator(header.getFilterLines()));
}
final SAMSequenceDictionary dict = header.getSequenceDictionary();
if (dict != null && !dict.isEmpty()) {
chartGenerators.add(new ContigUsageGenerator(dict));
}
if (header.hasGenotypingData()) {
chartGenerators.add(new GenotypeTypeGenerator(header.getGenotypeSamples()));
chartGenerators.add(new AffectedSamplesGenerator());
for (final String sn : header.getSampleNamesInOrder()) {
// the loop body is empty in this excerpt; per-sample chart generators would be added here
}
}
final VariantContextRunner runner = new VariantContextRunner(vcfFileReader);
primaryStage.setOnShowing(AE -> {
new Thread(runner).start();
});
primaryStage.setOnCloseRequest(AE -> {
stop = true;
CloserUtil.close(runner);
});
final TabPane tabPane = new TabPane();
chartGenerators.removeIf(G -> !G.isEnabled());
for (final ChartGenerator g : chartGenerators) {
tabPane.getTabs().add(g.makeTab());
}
final Screen scr = Screen.getPrimary();
Scene scene = new Scene(tabPane, scr.getBounds().getWidth() - 100, scr.getBounds().getHeight() - 100);
primaryStage.setScene(scene);
primaryStage.show();
} catch (final Exception err) {
err.printStackTrace();
return -1;
}
return 0;
}
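The header accessors used above (getFilterLines, getSequenceDictionary, hasGenotypingData, getGenotypeSamples) are plain htsjdk calls. A minimal standalone sketch, assuming an un-indexed VCF is acceptable (the false flag skips the index requirement):

import java.io.File;
import htsjdk.variant.vcf.VCFFileReader;
import htsjdk.variant.vcf.VCFHeader;

public class InspectHeader {
    public static void main(final String[] args) {
        // open without requiring a .tbi/.idx index
        try (VCFFileReader reader = new VCFFileReader(new File(args[0]), false)) {
            final VCFHeader header = reader.getFileHeader();
            System.out.println("FILTER lines : " + header.getFilterLines().size());
            System.out.println("has genotypes: " + header.hasGenotypingData());
            System.out.println("samples      : " + header.getGenotypeSamples());
        }
    }
}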
use of htsjdk.variant.vcf.VCFHeader in project jvarkit by lindenb.
the class VcfUcsc method doVcfToVcf.
@Override
protected int doVcfToVcf(final String inputName, final VcfIterator in, final VariantContextWriter out) {
final String TAG;
if (StringUtil.isBlank(this.infoTag)) {
TAG = "UCSC_" + database.toUpperCase() + "_" + table.toUpperCase();
} else {
TAG = this.infoTag;
}
VCFHeader header = in.getHeader();
final VCFHeader h2 = new VCFHeader(header);
h2.addMetaDataLine(new VCFInfoHeaderLine(TAG, VCFHeaderLineCount.UNBOUNDED, VCFHeaderLineType.String, database + "." + table));
if (!StringUtil.isBlank(this.filterIn)) {
h2.addMetaDataLine(new VCFFilterHeaderLine(this.filterIn, "Set by " + this.getClass().getName()));
} else if (!StringUtil.isBlank(this.filterOut)) {
h2.addMetaDataLine(new VCFFilterHeaderLine(this.filterOut, "Set by " + this.getClass().getName()));
}
h2.addMetaDataLine(new VCFHeaderLine(getClass().getSimpleName() + "CmdLine", String.valueOf(getProgramCommandLine())));
h2.addMetaDataLine(new VCFHeaderLine(getClass().getSimpleName() + "Version", String.valueOf(getVersion())));
out.writeHeader(h2);
final Map<Integer, PreparedStatement> bin2pstmt = new HashMap<>();
try {
if (!this.has_bin_column) {
bin2pstmt.put(0, createPreparedStatement(0));
}
final SAMSequenceDictionaryProgress progress = new SAMSequenceDictionaryProgress(header).logger(LOG);
while (in.hasNext()) {
final VariantContext ctx = progress.watch(in.next());
int start0, end0;
if (ctx.isIndel()) { // mutation starts *after* the base
start0 = ctx.getStart();
end0 = ctx.getEnd();
} else {
start0 = ctx.getStart() - 1;
end0 = ctx.getEnd();
}
// extends left/right
start0 = Math.max(0, start0 - this.extend_bases);
end0 += this.extend_bases;
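// e.g. with extend_bases=0, a SNP at POS=100 yields the zero-based half-open window [99,100)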
final Set<String> atts = new HashSet<String>();
if (this.has_bin_column) {
final List<Integer> binList = reg2bins(start0, end0);
PreparedStatement pstmt = bin2pstmt.get(binList.size());
if (pstmt == null) {
LOG.debug("create prepared statemement for bin.size=" + binList.size() + "[" + start0 + ":" + end0 + "]");
pstmt = createPreparedStatement(binList.size());
bin2pstmt.put(binList.size(), pstmt);
}
initPstmt(pstmt, ctx.getContig(), start0, end0);
for (int x = 0; x < binList.size(); ++x) {
pstmt.setInt(4 + x, binList.get(x));
}
select(atts, pstmt);
} else {
// already defined
final PreparedStatement pstmt = bin2pstmt.get(0);
initPstmt(pstmt, ctx.getContig(), start0, end0);
select(atts, pstmt);
}
if (atts.isEmpty() && StringUtil.isBlank(this.filterIn) && StringUtil.isBlank(this.filterOut)) {
out.add(ctx);
continue;
}
final VariantContextBuilder vcb = new VariantContextBuilder(ctx);
if (!StringUtil.isBlank(this.filterIn) && !atts.isEmpty()) {
vcb.filter(this.filterIn);
} else if (!StringUtil.isBlank(this.filterOut) && atts.isEmpty()) {
vcb.filter(this.filterOut);
}
if (!atts.isEmpty()) {
vcb.attribute(TAG, atts.toArray());
}
out.add(vcb.make());
}
progress.finish();
return 0;
} catch (final SQLException err) {
LOG.error(err);
throw new RuntimeException("SQLError", err);
} finally {
for (final PreparedStatement pstmt : bin2pstmt.values()) {
CloserUtil.close(pstmt);
}
bin2pstmt.clear();
}
}
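The reg2bins(start0, end0) helper is not part of this excerpt. UCSC tables carrying a bin column use the standard UCSC/BAM binning scheme, so a plausible implementation follows the reg2bins routine from the SAM specification (the class name here is invented):

import java.util.ArrayList;
import java.util.List;

public class Reg2Bins {
    /** UCSC/BAM bins overlapping the zero-based half-open interval [beg,end). */
    public static List<Integer> reg2bins(final int beg, int end) {
        final List<Integer> bins = new ArrayList<>();
        --end; // switch to an inclusive end coordinate
        bins.add(0);
        for (int k = 1 + (beg >> 26); k <= 1 + (end >> 26); ++k) bins.add(k);
        for (int k = 9 + (beg >> 23); k <= 9 + (end >> 23); ++k) bins.add(k);
        for (int k = 73 + (beg >> 20); k <= 73 + (end >> 20); ++k) bins.add(k);
        for (int k = 585 + (beg >> 17); k <= 585 + (end >> 17); ++k) bins.add(k);
        for (int k = 4681 + (beg >> 14); k <= 4681 + (end >> 14); ++k) bins.add(k);
        return bins;
    }
}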
use of htsjdk.variant.vcf.VCFHeader in project jvarkit by lindenb.
the class VCFFixIndels method doVcfToVcf.
@Override
protected int doVcfToVcf(String inputName, VcfIterator r, VariantContextWriter w) {
long nChanged = 0L;
final String TAG = "INDELFIXED";
final VCFHeader header = r.getHeader();
final VCFHeader h2 = new VCFHeader(header);
addMetaData(h2);
h2.addMetaDataLine(new VCFInfoHeaderLine(TAG, 1, VCFHeaderLineType.String, "Fix Indels for @SolenaLS (position|alleles...)"));
w.writeHeader(h2);
final SAMSequenceDictionaryProgress progress = new SAMSequenceDictionaryProgress(header);
while (r.hasNext()) {
boolean somethingChanged = false;
VariantContext ctx = progress.watch(r.next());
/* map old allele to new allele */
Map<Allele, Allele> original2modified = new HashMap<Allele, Allele>();
/* did we find a strange allele (symbolic, etc.)? */
boolean strange_allele_flag = false;
for (final Allele a : ctx.getAlleles()) {
original2modified.put(a, a);
if (a.isSymbolic() || a.isNoCall()) {
strange_allele_flag = true;
break;
}
}
if (strange_allele_flag || original2modified.size() < 2) /* at least 2 alleles: REF + ALT */
{
w.add(ctx);
continue;
}
/* record chromStart if we need to shift to the right */
int chromStart = ctx.getStart();
/* trim right then left */
for (int side = 0; side < 2; ++side) {
boolean done = false;
while (!done) {
boolean ok_side = true;
/* common nucleotide at end/start */
Character targetChar = null;
done = true;
// scan side
Set<Allele> keys = new HashSet<>(original2modified.keySet());
for (Allele k : keys) {
Allele newAllele = original2modified.get(k);
if (newAllele.isSymbolic()) {
ok_side = false;
break;
}
String baseString = newAllele.getBaseString().trim().toUpperCase();
if (baseString.length() < 2) {
ok_side = false;
break;
}
/* first or last char of all sequences
* side==0 : right
* side==1 : left
*/
Character baseChar = (side == 0 ? baseString.charAt(baseString.length() - 1) : baseString.charAt(0));
if (targetChar == null) {
targetChar = baseChar;
} else if (!targetChar.equals(baseChar)) {
/* doesn't end with same nucleotide */
ok_side = false;
break;
}
}
/* ok we can shift all alleles */
if (ok_side && targetChar != null) {
done = false;
somethingChanged = true;
for (Allele k : keys) {
Allele newAllele = original2modified.get(k);
String baseString = newAllele.getBaseString().trim().toUpperCase();
if (side == 0) { // remove last nucleotide
newAllele = Allele.create(baseString.substring(0, baseString.length() - 1), newAllele.isReference());
} else {
newAllele = Allele.create(baseString.substring(1), newAllele.isReference());
}
original2modified.put(k, newAllele);
}
if (side == 1)
chromStart++;
}
}
/* end of while done */
}
if (!somethingChanged) {
w.add(ctx);
continue;
}
final VariantContextBuilder b = new VariantContextBuilder(ctx);
b.start(chromStart);
Allele newRef = original2modified.get(ctx.getReference());
b.stop(chromStart + newRef.getBaseString().length() - 1);
b.alleles(original2modified.values());
List<Genotype> genotypes = new ArrayList<>();
for (final String sample : header.getSampleNamesInOrder()) {
final Genotype g = ctx.getGenotype(sample);
if (!g.isCalled()) {
genotypes.add(g);
continue;
}
final GenotypeBuilder gb = new GenotypeBuilder(g);
final List<Allele> aL = new ArrayList<>();
for (final Allele a : g.getAlleles()) {
aL.add(original2modified.get(a));
}
gb.alleles(aL);
genotypes.add(gb.make());
}
StringBuilder tagContent = new StringBuilder();
tagContent.append(String.valueOf(ctx.getStart()));
for (Allele a : ctx.getAlleles()) {
tagContent.append("|");
tagContent.append(a.toString());
}
b.attribute(TAG, tagContent.toString());
b.genotypes(genotypes);
w.add(b.make());
++nChanged;
if (w.checkError())
break;
}
progress.finish();
LOG.info("indels changed:" + nChanged);
return RETURN_OK;
}
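To see what the trimming loop does, here is a tiny self-contained sketch of the right-trim step (side == 0) on invented alleles; it mirrors the loop above without the htsjdk plumbing:

public class RightTrimDemo {
    public static void main(final String[] args) {
        // REF=CAAA, ALT=CAA at POS=100 : both alleles end with 'A'
        String ref = "CAAA";
        String alt = "CAA";
        // drop the shared last base while both alleles keep at least 2 bases
        while (ref.length() > 1 && alt.length() > 1
                && ref.charAt(ref.length() - 1) == alt.charAt(alt.length() - 1)) {
            ref = ref.substring(0, ref.length() - 1);
            alt = alt.substring(0, alt.length() - 1);
        }
        System.out.println(ref + "/" + alt); // CA/C : right-trimming does not move POS
    }
}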
use of htsjdk.variant.vcf.VCFHeader in project jvarkit by lindenb.
the class VcfOffsetsIndexFactory method indexVcfFile.
/**
* index a vcf file for its variant offsets
*/
public File indexVcfFile(final File vcfFile, final File indexFile) throws IOException {
LOG.info("indexing " + vcfFile);
IOUtil.assertFileIsReadable(vcfFile);
DataOutputStream daos = null;
BlockCompressedInputStream bgzin = null;
AsciiLineReader ascii = null;
VCFHeader header = null;
final VCFCodec codec = new VCFCodec();
SAMSequenceDictionaryProgress progress = null;
try {
daos = new DataOutputStream(new FileOutputStream(indexFile));
daos.write(MAGIC);
if (vcfFile.getName().endsWith(".vcf.gz")) {
bgzin = new BlockCompressedInputStream(vcfFile);
ascii = null;
} else if (vcfFile.getName().endsWith(".vcf")) {
bgzin = null;
ascii = new AsciiLineReader(new FileInputStream(vcfFile));
} else {
throw new IllegalArgumentException("not a vcf.gz or vcf file: " + vcfFile);
}
final List<String> headerLines = new ArrayList<>();
for (; ; ) {
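// capture the position *before* reading the line: a BGZF virtual offset for .vcf.gz, a plain byte offset for .vcf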
final long offset = (ascii == null ? bgzin.getPosition() : ascii.getPosition());
final String line = (ascii == null ? bgzin.readLine() : ascii.readLine());
if (line == null)
break;
if (line.startsWith("#")) {
headerLines.add(line);
if (line.startsWith("#CHROM")) {
codec.readHeader(new LineIterator() {
int i = 0;
@Override
public String next() {
final String s = headerLines.get(i);
i++;
return s;
}
@Override
public boolean hasNext() {
return i < headerLines.size();
}
@Override
public String peek() {
return i < headerLines.size() ? headerLines.get(i) : null;
}
});
header = VCFUtils.parseHeader(headerLines).header;
progress = new SAMSequenceDictionaryProgress(header);
progress.logger(this.logger == null ? LOG : this.logger);
progress.setLogPrefix("indexing");
}
continue;
}
if (progress == null) {
throw new JvarkitException.FileFormatError("no vcf header in " + vcfFile);
}
final VariantContext ctx = codec.decode(line);
progress.watch(ctx);
if (this.acceptVariant != null) {
if (!acceptVariant.test(ctx))
continue;
}
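// the variant was accepted: record where its line starts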
daos.writeLong(offset);
}
if (progress == null) {
throw new JvarkitException.FileFormatError("no vcf header in " + vcfFile);
}
progress.finish();
daos.flush();
daos.close();
return indexFile;
} catch (final IOException err) {
throw err;
} finally {
CloserUtil.close(ascii);
CloserUtil.close(bgzin);
CloserUtil.close(daos);
}
}
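The file written above is simply MAGIC followed by one long per accepted variant, so it can be read back with a DataInputStream. A minimal sketch, assuming the caller supplies the same magic bytes (for a .vcf.gz each offset is usable with BlockCompressedInputStream.seek):

import java.io.DataInputStream;
import java.io.EOFException;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

public class OffsetIndexReader {
    public static List<Long> readOffsets(final File indexFile, final byte[] magic) throws IOException {
        try (DataInputStream dis = new DataInputStream(new FileInputStream(indexFile))) {
            final byte[] buf = new byte[magic.length];
            dis.readFully(buf); // skip (or verify) the magic header
            final List<Long> offsets = new ArrayList<>();
            for (;;) {
                try {
                    offsets.add(dis.readLong());
                } catch (final EOFException eof) {
                    break; // end of index
                }
            }
            return offsets;
        }
    }
}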
use of htsjdk.variant.vcf.VCFHeader in project jvarkit by lindenb.
the class VCFComm method doWork.
@Override
public int doWork(final List<String> args) {
CloseableIterator<LineAndFile> iter = null;
SortingCollection<LineAndFile> variants = null;
VariantContextWriter w = null;
try {
if (args.isEmpty()) {
LOG.error("Illegal number of arguments");
return -1;
}
Set<VCFHeaderLine> metaData = new HashSet<VCFHeaderLine>();
variants = SortingCollection.newInstance(LineAndFile.class, new LineAndFileCodec(), new LineAndFileComparator(), super.sortingCollectionArgs.getMaxRecordsInRam(), super.sortingCollectionArgs.getTmpPaths());
variants.setDestructiveIteration(true);
/**
* new sample names in the output vcf: one sample per file
*/
final Map<Integer, String> fileid2sampleName = new TreeMap<>();
/**
* samples names as they appear in the original VCF headers
*/
final Counter<String> countInputSamples = new Counter<String>();
/**
* dicts
*/
final List<SAMSequenceDictionary> all_dictionaries = new ArrayList<>();
for (final String vcffilename : IOUtils.unrollFiles(args)) {
LOG.info("Reading from " + vcffilename);
final Input input = super.put(variants, vcffilename);
String sampleName = vcffilename;
if (sampleName.endsWith(".vcf.gz")) {
sampleName = sampleName.substring(0, sampleName.length() - 7);
} else if (sampleName.endsWith(".vcf")) {
sampleName = sampleName.substring(0, sampleName.length() - 4);
}
int slash = sampleName.lastIndexOf(File.separatorChar);
if (slash != -1)
sampleName = sampleName.substring(slash + 1);
int suffix = 1;
// loop until we find a unique name, e.g. "sample", then "sample_2", "sample_3", ...
for (; ; ) {
final String key = sampleName + (suffix == 1 ? "" : "_" + suffix);
if (fileid2sampleName.values().contains(key)) {
suffix++;
continue;
}
fileid2sampleName.put(input.file_id, key);
metaData.add(new VCFHeaderLine(key, vcffilename));
break;
}
for (final String sname : input.codecAndHeader.header.getSampleNamesInOrder()) {
countInputSamples.incr(sname);
}
all_dictionaries.add(input.codecAndHeader.header.getSequenceDictionary());
}
variants.doneAdding();
/**
* the unique sample name, if one and the same sample appears in every input VCF
*/
Optional<String> unqueSampleName = Optional.empty();
if (countInputSamples.getCountCategories() == 1 && countInputSamples.count(countInputSamples.keySet().iterator().next()) == fileid2sampleName.size()) {
unqueSampleName = Optional.of(countInputSamples.keySet().iterator().next());
LOG.info("Unique sample name is " + unqueSampleName.get());
}
VCFStandardHeaderLines.addStandardFormatLines(metaData, true, VCFConstants.DEPTH_KEY, VCFConstants.GENOTYPE_QUALITY_KEY, VCFConstants.GENOTYPE_KEY, VCFConstants.GENOTYPE_FILTER_KEY);
VCFStandardHeaderLines.addStandardInfoLines(metaData, true, VCFConstants.DEPTH_KEY, VCFConstants.ALLELE_COUNT_KEY, VCFConstants.ALLELE_NUMBER_KEY);
metaData.add(new VCFHeaderLine(getClass().getSimpleName(), "version:" + getVersion() + " command:" + getProgramCommandLine()));
final VCFFilterHeaderLine variantNotCalledInAllVcf = new VCFFilterHeaderLine("NotCalledEveryWhere", "Variant was NOT called in all input VCFs");
metaData.add(variantNotCalledInAllVcf);
final VCFFilterHeaderLine variantWasFiltered = new VCFFilterHeaderLine("VariantWasFiltered", "At least one variant was filtered");
metaData.add(variantWasFiltered);
final VCFFormatHeaderLine variantQUALFormat = new VCFFormatHeaderLine("VCQUAL", 1, VCFHeaderLineType.Float, "Variant Quality");
metaData.add(variantQUALFormat);
metaData.add(new VCFFormatHeaderLine(VCFConstants.ALLELE_NUMBER_KEY, 1, VCFHeaderLineType.Integer, "Number of alleles in the source VCF"));
metaData.add(new VCFFormatHeaderLine(VCFConstants.ALLELE_COUNT_KEY, 1, VCFHeaderLineType.Integer, "Number of ALT alleles"));
final VCFInfoHeaderLine foundInCountVcfInfo = new VCFInfoHeaderLine("NVCF", 1, VCFHeaderLineType.Integer, "Number of VCFs in which this variant was found");
metaData.add(foundInCountVcfInfo);
final VCFInfoHeaderLine variantTypesInfo = new VCFInfoHeaderLine("VTYPES", VCFHeaderLineCount.UNBOUNDED, VCFHeaderLineType.String, "Distinct variant types");
metaData.add(variantTypesInfo);
final VCFFilterHeaderLine multipleTypeFilters = new VCFFilterHeaderLine("DiscordantTypes", "Discordant types at this position");
metaData.add(multipleTypeFilters);
final VCFFormatHeaderLine variantTypeFormat = new VCFFormatHeaderLine("VTYPE", 1, VCFHeaderLineType.String, "Variant Type");
metaData.add(variantTypeFormat);
final VCFFilterHeaderLine uniqueVariantDiscordantGTFilter;
if (unqueSampleName.isPresent()) {
metaData.add(new VCFHeaderLine("UniqSample", unqueSampleName.get()));
uniqueVariantDiscordantGTFilter = new VCFFilterHeaderLine("DiscordantGenotypeForUniqSample", "Genotype discordant for sample " + unqueSampleName.get());
metaData.add(uniqueVariantDiscordantGTFilter);
} else {
uniqueVariantDiscordantGTFilter = null;
}
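// the output header carries one pseudo-sample per input VCF file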
final VCFHeader header = new VCFHeader(metaData, new ArrayList<>(fileid2sampleName.values()));
if (!normalize_chr && !all_dictionaries.contains(null)) { // all inputs have a dict
SAMSequenceDictionary thedict = null;
for (int x = 0; x < all_dictionaries.size(); ++x) {
SAMSequenceDictionary d = all_dictionaries.get(x);
if (thedict == null) {
thedict = d;
} else if (!SequenceUtil.areSequenceDictionariesEqual(d, thedict)) {
thedict = null;
break;
}
}
if (thedict != null)
header.setSequenceDictionary(thedict);
}
w = super.openVariantContextWriter(super.outputFile);
w.writeHeader(header);
final List<LineAndFile> row = new ArrayList<LineAndFile>(super.inputs.size());
final Comparator<LineAndFile> posCompare = (A, B) -> A.getContigPosRef().compareTo(B.getContigPosRef());
iter = variants.iterator();
for (; ; ) {
LineAndFile rec = null;
if (iter.hasNext()) {
rec = iter.next();
}
if (rec == null || (!row.isEmpty() && posCompare.compare(row.get(0), rec) != 0)) {
if (!row.isEmpty()) {
final VariantContext first = row.get(0).getContext();
/* in which file id we find this variant */
Set<Integer> fileids_for_variant = row.stream().map(LAF -> LAF.fileIdx).collect(Collectors.toSet());
// handle rows where one file contributed multiple variants with the same chrom/pos/ref but different alt
if (row.size() != fileids_for_variant.size()) {
for (; ; ) {
boolean ok = true;
for (int x = 0; ok && x + 1 < row.size(); ++x) {
final VariantContext ctxx = row.get(x).getContext();
final List<Allele> altsx = ctxx.getAlternateAlleles();
for (int y = x + 1; ok && y < row.size(); ++y) {
if (row.get(x).fileIdx != row.get(y).fileIdx)
continue;
final VariantContext ctxy = row.get(y).getContext();
final List<Allele> altsy = ctxy.getAlternateAlleles();
if (altsx.equals(altsy))
continue;
if (!ctxx.isVariant() && ctxy.isVariant()) {
row.remove(x);
} else if (ctxx.isVariant() && !ctxy.isVariant()) {
row.remove(y);
} else if (!ctxx.isSNP() && ctxy.isSNP()) {
row.remove(x);
} else if (ctxx.isSNP() && !ctxy.isSNP()) {
row.remove(y);
} else if (altsx.size() > altsy.size()) {
row.remove(x);
} else if (altsx.size() < altsy.size()) {
row.remove(y);
} else {
row.remove(y);
}
ok = false;
break;
}
}
if (ok)
break;
}
fileids_for_variant = row.stream().map(LAF -> LAF.fileIdx).collect(Collectors.toSet());
}
if (row.size() != fileids_for_variant.size()) {
LOG.error("There are some duplicated variants at the position " + new ContigPosRef(first) + " in the same vcf file");
for (final LineAndFile laf : row) {
LOG.error("File [" + laf.fileIdx + "]" + fileid2sampleName.get(laf.fileIdx));
LOG.error("\t" + laf.getContigPosRef());
}
row.clear();
} else {
final Set<Allele> alleles = row.stream().flatMap(R -> R.getContext().getAlleles().stream()).collect(Collectors.toSet());
final VariantContextBuilder vcb = new VariantContextBuilder(getClass().getName(), first.getContig(), first.getStart(), first.getEnd(), alleles);
final Set<String> filters = new HashSet<>();
final Set<VariantContext.Type> variantContextTypes = new HashSet<>();
final List<Genotype> genotypes = new ArrayList<Genotype>();
for (final LineAndFile laf : row) {
if (laf.getContext().isFiltered())
filters.add(variantWasFiltered.getID());
variantContextTypes.add(laf.getContext().getType());
final GenotypeBuilder gbuilder = new GenotypeBuilder();
gbuilder.name(fileid2sampleName.get(laf.fileIdx));
if (unqueSampleName.isPresent()) {
final Genotype g0 = laf.getContext().getGenotype(unqueSampleName.get());
if (g0 == null) {
iter.close();
w.close();
throw new IllegalStateException("Cannot find genotype for " + unqueSampleName.get());
}
if (g0.hasDP())
gbuilder.DP(g0.getDP());
if (g0.hasGQ())
gbuilder.GQ(g0.getGQ());
gbuilder.alleles(g0.getAlleles());
} else {
gbuilder.alleles(Arrays.asList(first.getReference(), first.getReference()));
if (laf.getContext().hasAttribute(VCFConstants.DEPTH_KEY)) {
gbuilder.DP(laf.getContext().getAttributeAsInt(VCFConstants.DEPTH_KEY, 0));
}
}
if (laf.getContext().isFiltered()) {
gbuilder.filter("VCFFILTERED");
}
if (laf.getContext().hasLog10PError()) {
gbuilder.attribute(variantQUALFormat.getID(), laf.getContext().getPhredScaledQual());
}
gbuilder.attribute(VCFConstants.ALLELE_NUMBER_KEY, laf.getContext().getGenotypes().stream().flatMap(G -> G.getAlleles().stream()).filter(A -> !A.isNoCall()).count());
gbuilder.attribute(VCFConstants.ALLELE_COUNT_KEY, laf.getContext().getGenotypes().stream().flatMap(G -> G.getAlleles().stream()).filter(A -> !(A.isReference() || A.isNoCall())).count());
gbuilder.attribute(variantTypeFormat.getID(), laf.getContext().getType().name());
genotypes.add(gbuilder.make());
}
final String id = String.join(";", row.stream().map(LAF -> LAF.getContext()).filter(V -> V.hasID()).map(V -> V.getID()).collect(Collectors.toSet()));
if (!id.isEmpty())
vcb.id(id);
vcb.genotypes(genotypes);
if (unqueSampleName.isPresent()) {
boolean all_same = true;
for (int x = 0; all_same && x + 1 < genotypes.size(); ++x) {
if (!genotypes.get(x).isCalled())
continue;
for (int y = x + 1; all_same && y < genotypes.size(); ++y) {
if (!genotypes.get(y).isCalled())
continue;
if (!genotypes.get(x).sameGenotype(genotypes.get(y), true)) {
all_same = false;
break;
}
}
}
if (!all_same)
filters.add(uniqueVariantDiscordantGTFilter.getID());
}
// Add AN: total number of alleles carried by called genotypes (e.g. 2 per called diploid sample)
vcb.attribute(VCFConstants.ALLELE_NUMBER_KEY, genotypes.stream().filter(G -> G.isCalled()).mapToInt(G -> G.getAlleles().size()).sum());
if (!variantContextTypes.isEmpty()) {
vcb.attribute(variantTypesInfo.getID(), new ArrayList<>(variantContextTypes.stream().map(T -> T.name()).collect(Collectors.toSet())));
if (variantContextTypes.size() > 1) {
filters.add(multipleTypeFilters.getID());
}
}
vcb.attribute(foundInCountVcfInfo.getID(), fileids_for_variant.size());
boolean print = true;
if (row.size() == super.inputs.size() && ignore_everywhere) {
print = false;
}
if (fileids_for_variant.size() != fileid2sampleName.size()) {
filters.add(variantNotCalledInAllVcf.getID());
if (only_everywhere) {
print = false;
}
}
vcb.filters(filters);
if (print) {
w.add(vcb.make());
}
}
row.clear();
}
if (rec == null)
break;
}
row.add(rec);
}
iter.close();
iter = null;
w.close();
w = null;
return 0;
} catch (Exception err) {
LOG.error(err);
return -1;
} finally {
CloserUtil.close(iter);
CloserUtil.close(w);
try {
if (variants != null)
variants.cleanup();
} catch (Exception err) {
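// ignore errors raised while cleaning up the sorting collection's temporary files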
}
}
}
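As the snippet shows, a VCFHeader can be assembled from scratch out of a set of metadata lines plus a list of sample names. A minimal sketch with invented sample names:

import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import htsjdk.variant.vcf.VCFConstants;
import htsjdk.variant.vcf.VCFHeader;
import htsjdk.variant.vcf.VCFHeaderLine;
import htsjdk.variant.vcf.VCFStandardHeaderLines;

public class BuildHeaderDemo {
    public static VCFHeader build() {
        final Set<VCFHeaderLine> metaData = new HashSet<>();
        // pull in the standard GT/DP FORMAT definitions, as addStandardFormatLines does above
        VCFStandardHeaderLines.addStandardFormatLines(metaData, true,
                VCFConstants.GENOTYPE_KEY, VCFConstants.DEPTH_KEY);
        // one pseudo-sample per hypothetical input file; these names are invented
        return new VCFHeader(metaData, Arrays.asList("file1", "file2"));
    }
}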