use of htsjdk.variant.variantcontext.writer.VariantContextWriter in project jvarkit by lindenb.
the class VcfTail method doVcfToVcf.
@Override
protected int doVcfToVcf(final String inputName, final VcfIterator in, final VariantContextWriter delegate) {
    try {
        final VariantContextWriter out = this.component.open(delegate);
        out.writeHeader(in.getHeader());
        final SAMSequenceDictionaryProgress progress = new SAMSequenceDictionaryProgress(in.getHeader()).logger(LOG);
        while (in.hasNext()) {
            out.add(progress.watch(in.next()));
        }
        progress.finish();
        out.close();
        return 0;
    } catch (final Exception err) {
        LOG.error(err);
        return -1;
    }
}
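The delegate argument is a plain htsjdk writer supplied by the caller; the component only wraps it. A minimal sketch of how such a writer could be built with VariantContextWriterBuilder (the class and file name here are hypothetical, and INDEX_ON_THE_FLY is unset because no reference dictionary is given to the builder):

import java.io.File;

import htsjdk.variant.variantcontext.writer.Options;
import htsjdk.variant.variantcontext.writer.VariantContextWriter;
import htsjdk.variant.variantcontext.writer.VariantContextWriterBuilder;

public class BuildDelegateWriter {
    // Hypothetical caller-side construction of the 'delegate' writer:
    // a plain VCF writer targeting an illustrative output file.
    static VariantContextWriter openDelegate(final File outVcf) {
        return new VariantContextWriterBuilder()
                .setOutputFile(outVcf)                    // e.g. new File("out.vcf"), hypothetical path
                .unsetOption(Options.INDEX_ON_THE_FLY)    // no dictionary set, so skip on-the-fly indexing
                .build();
    }
}

The returned writer would then be passed to doVcfToVcf() as the delegate argument.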
use of htsjdk.variant.variantcontext.writer.VariantContextWriter in project jvarkit by lindenb.
the class VcfOptimizePedForSkat method exec.
private void exec(final long generation, final VCFHeader header, final List<VariantContext> variants, final List<Pedigree.Person> samples, final SkatFactory.SkatExecutor skatExecutor) {
    final Solution solution = new Solution();
    solution.generation = generation;
    String origin = "random";
    if (generation != 0) {
        int nRemove = 1 + this.random.nextInt(this.nSamplesRemove);
        if (generation == 1 && this.bootstrapSamples != null) {
            // first generation: seed the solution with the user-provided bootstrap samples
            origin = "bootstrap";
            for (final String sample : this.bootstrapSamples.split("[; ,]")) {
                if (StringUtil.isBlank(sample))
                    continue;
                if (!samples.stream().anyMatch(S -> S.getId().equals(sample))) {
                    throw new JvarkitException.UserError("Sample " + sample + " not found in effective pedigree.");
                }
                LOG.info("bootstrapping with " + sample);
                solution.sampleSet.add(sample);
            }
        } else if (generation % 5 == 0 && !this.bestSolutions.isEmpty()) {
            // every 5th generation: mutate one of the current best solutions
            int sol_index = this.random.nextInt(Math.min(this.max_results_output, this.bestSolutions.size()));
            final List<String> list = new ArrayList<>(this.bestSolutions.get(sol_index).sampleSet);
            if (list.size() > 1 && this.random.nextBoolean()) {
                origin = "best-minus-random";
                list.remove(this.random.nextInt(list.size()));
            } else if (list.size() < nRemove) {
                origin = "best-plus-random";
                list.add(samples.get(this.random.nextInt(samples.size())).getId());
            }
            solution.sampleSet.addAll(list);
        } else if (generation % 7 == 0 && this.bestSolutions.size() > 2) {
            // every 7th generation: recombine the two best solutions
            final Set<String> set = new HashSet<>(this.bestSolutions.get(0).sampleSet);
            set.addAll(this.bestSolutions.get(1).sampleSet);
            final List<String> bestsamples = new ArrayList<>(set);
            Collections.shuffle(bestsamples, this.random);
            while (bestsamples.size() > nRemove) {
                bestsamples.remove(0);
            }
            solution.sampleSet.addAll(bestsamples);
            origin = "best0-plus-best1";
        } else {
            // default: pick samples at random, occasionally borrowing from the best solution
            while (nRemove > 0) {
                final String sampleId;
                if (generation % 3 == 0L && nRemove % 2 == 0 && this.bestSolutions.size() > 0 && !this.bestSolutions.get(0).sampleSet.isEmpty()) {
                    final List<String> bestsamples = new ArrayList<>(this.bestSolutions.get(0).sampleSet);
                    sampleId = bestsamples.get(this.random.nextInt(bestsamples.size()));
                    origin = "random-plus-best0";
                } else {
                    sampleId = samples.get(this.random.nextInt(samples.size())).getId();
                }
                solution.sampleSet.add(sampleId);
                nRemove--;
            }
        }
    } else {
        origin = "original";
    }
    if (generation > 0 && solution.sampleSet.isEmpty())
        return;
    // never remove more than nSamplesRemove samples
    while (solution.sampleSet.size() > this.nSamplesRemove) {
        LOG.warn("Hum... too many for " + origin);
        final List<String> L = new ArrayList<>(solution.sampleSet);
        while (L.size() > 0 && L.size() > this.nSamplesRemove) L.remove(this.random.nextInt(L.size()));
        solution.sampleSet.clear();
        solution.sampleSet.addAll(L);
    }
    if (this.bestSolutions.contains(solution))
        return;
    solution.origin = origin;
    // remove the selected samples from the pedigree; keep at least one affected and one unaffected sample
    final List<Pedigree.Person> ped2 = new ArrayList<>(samples);
    ped2.removeIf(I -> solution.sampleSet.contains(I.getId()));
    if (ped2.isEmpty())
        return;
    if (!ped2.stream().anyMatch(P -> P.isAffected()))
        return;
    if (!ped2.stream().anyMatch(P -> P.isUnaffected()))
        return;
    // apply the MAF and Fisher burden filters before running SKAT
    final VCFHeader header2 = new VCFHeader(header.getMetaDataInInputOrder(), header.getSampleNamesInOrder());
    final VcfBurdenFisherH.CtxWriterFactory fisherhFactory = new VcfBurdenFisherH.CtxWriterFactory();
    fisherhFactory.setCaseControlExtractor((H) -> new HashSet<>(ped2));
    fisherhFactory.setIgnoreFiltered(true);
    final VcfBurdenMAF.CtxWriterFactory mafFactory = new VcfBurdenMAF.CtxWriterFactory();
    mafFactory.setCaseControlExtractor((H) -> new HashSet<>(ped2));
    mafFactory.setIgnoreFiltered(true);
    // chain the writers: fisherH -> MAF -> in-memory collector
    final VariantCollector collector = new VariantCollector();
    VariantContextWriter vcw = mafFactory.open(collector);
    vcw = fisherhFactory.open(vcw);
    vcw.writeHeader(header2);
    for (final VariantContext vc : variants) {
        vcw.add(vc);
    }
    vcw.close();
    CloserUtil.close(fisherhFactory);
    CloserUtil.close(mafFactory);
    // run SKAT on the filtered variants with the reduced pedigree
    solution.result = skatExecutor.execute(collector.variants, ped2);
    if (solution.result.isError())
        return;
    if (this.bestSolutions.isEmpty() || solution.compareTo(this.bestSolutions.get(this.bestSolutions.size() - 1)) < 0) {
        this.bestSolutions.add(solution);
        if (this.firstScore == null) {
            this.firstScore = solution.result.getPValue();
        }
        Collections.sort(this.bestSolutions);
        final int BUFFER_RESULT = 1000;
        while (this.bestSolutions.size() > BUFFER_RESULT) {
            this.bestSolutions.remove(this.bestSolutions.size() - 1);
        }
        if (this.bestSolutions.indexOf(solution) < this.max_results_output) {
            final StringBuilder sb = new StringBuilder();
            sb.append(">>> ").append(generation).append("\n");
            this.bestSolutions.stream().limit(this.max_results_output).forEach(S -> sb.append(S).append("\n"));
            sb.append("<<< ").append(generation).append("\n");
            if (outputFile == null) {
                stdout().println(sb.toString());
            } else {
                stderr().println(sb.toString());
                IOUtils.cat(sb.toString(), this.outputFile, false);
            }
        }
    }
}
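VariantCollector is a jvarkit-internal sink: a VariantContextWriter that keeps the variants it receives in memory so they can be handed to the SKAT executor. A minimal sketch of the same idea written directly against the htsjdk interface (the class name is hypothetical, and whether setHeader is part of the interface depends on the htsjdk version):

import java.util.ArrayList;
import java.util.List;

import htsjdk.variant.variantcontext.VariantContext;
import htsjdk.variant.variantcontext.writer.VariantContextWriter;
import htsjdk.variant.vcf.VCFHeader;

// Hypothetical in-memory sink: everything add()-ed is stored in a list
// instead of being written to a file.
public class CollectingVariantContextWriter implements VariantContextWriter {
    final List<VariantContext> variants = new ArrayList<>();
    private VCFHeader header = null;

    @Override
    public void writeHeader(final VCFHeader header) { this.header = header; }

    // part of the VariantContextWriter interface in recent htsjdk releases
    public void setHeader(final VCFHeader header) { this.header = header; }

    @Override
    public void add(final VariantContext ctx) { this.variants.add(ctx); }

    @Override
    public boolean checkError() { return false; }

    @Override
    public void close() { /* nothing to flush */ }
}

Such a sink can then be decorated by the burden-filter factories, e.g. fisherhFactory.open(mafFactory.open(collector)), which is the chain built in the code above.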
use of htsjdk.variant.variantcontext.writer.VariantContextWriter in project jvarkit by lindenb.
the class VcfIndexTabix method doVcfToVcf.
private int doVcfToVcf(String inputName, final VcfIterator vcfIn, final File outFile) throws IOException {
    SortingVCFWriter sortingVCW = null;
    VariantContextWriterBuilder vcwb = new VariantContextWriterBuilder();
    VariantContextWriter w = null;
    try {
        SAMSequenceDictionary dict = vcfIn.getHeader().getSequenceDictionary();
        if (dict != null)
            vcwb.setReferenceDictionary(dict);
        vcwb.setOutputFile(outFile);
        vcwb.setOutputFileType(OutputType.BLOCK_COMPRESSED_VCF);
        w = vcwb.build();
        if (this.sort) {
            LOG.info("Creating a sorting writer");
            sortingVCW = new SortingVCFWriter(w);
            w = sortingVCW;
        }
        w.writeHeader(vcfIn.getHeader());
        SAMSequenceDictionaryProgress progress = new SAMSequenceDictionaryProgress(vcfIn.getHeader());
        while (vcfIn.hasNext()) {
            w.add(progress.watch(vcfIn.next()));
        }
        progress.finish();
        w.close();
        w = null;
        return RETURN_OK;
    } catch (Exception e) {
        // on failure, delete the partial output and its tabix index, if any
        if (outFile.exists() && outFile.isFile()) {
            LOG.warn("Deleting " + outFile);
            outFile.delete();
            File tbi = new File(outFile.getPath() + TabixUtils.STANDARD_INDEX_EXTENSION);
            if (tbi.exists() && tbi.isFile())
                tbi.delete();
        }
        LOG.error(e);
        return -1;
    } finally {
        CloserUtil.close(w);
    }
}
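An alternative to indexing the block-compressed VCF afterwards is to let htsjdk build the tabix index while writing, via Options.INDEX_ON_THE_FLY. A minimal sketch, assuming the header carries a sequence dictionary (the class and method names are hypothetical):

import java.io.File;

import htsjdk.samtools.SAMSequenceDictionary;
import htsjdk.variant.variantcontext.writer.Options;
import htsjdk.variant.variantcontext.writer.VariantContextWriter;
import htsjdk.variant.variantcontext.writer.VariantContextWriterBuilder;
import htsjdk.variant.variantcontext.writer.VariantContextWriterBuilder.OutputType;
import htsjdk.variant.vcf.VCFHeader;

public class TabixOnTheFlyExample {
    // Hypothetical helper: write a block-compressed VCF and let htsjdk create
    // the tabix index during writing, instead of indexing the file afterwards.
    static VariantContextWriter openIndexedWriter(final File outFile, final VCFHeader header) {
        final SAMSequenceDictionary dict = header.getSequenceDictionary(); // must not be null
        return new VariantContextWriterBuilder()
                .setOutputFile(outFile)
                .setOutputFileType(OutputType.BLOCK_COMPRESSED_VCF)
                .setReferenceDictionary(dict)
                .setOption(Options.INDEX_ON_THE_FLY)   // writes the .tbi alongside the .vcf.gz
                .build();
    }
}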
use of htsjdk.variant.variantcontext.writer.VariantContextWriter in project jvarkit by lindenb.
the class VcfMultiToOneAllele method doVcfToVcf.
@Override
public int doVcfToVcf(final String inputName, final VcfIterator r, final VariantContextWriter delegate) {
    final VariantContextWriter w = this.component.open(delegate);
    w.writeHeader(r.getHeader());
    final SAMSequenceDictionaryProgress progress = new SAMSequenceDictionaryProgress(r.getHeader()).logger(LOG);
    while (r.hasNext()) {
        w.add(progress.watch(r.next()));
    }
    progress.finish();
    w.close();
    return 0;
}
use of htsjdk.variant.variantcontext.writer.VariantContextWriter in project jvarkit by lindenb.
the class VcfRemoveUnusedAlt method doVcfToVcf.
@Override
protected int doVcfToVcf(final String inputName, final VcfIterator in, final VariantContextWriter delegate) {
    try {
        final VariantContextWriter out = this.component.open(delegate);
        out.writeHeader(in.getHeader());
        final SAMSequenceDictionaryProgress progress = new SAMSequenceDictionaryProgress(in.getHeader()).logger(LOG);
        while (in.hasNext()) {
            out.add(progress.watch(in.next()));
        }
        progress.finish();
        out.close();
        return 0;
    } catch (final Exception err) {
        LOG.error(err);
        return -1;
    }
}