Use of com.github.lindenb.jvarkit.util.vcf.VcfTools in project jvarkit by lindenb.
The class VcfGeneEpistasis, method doWork:
@Override
public int doWork(final List<String> args) {
if (this.geneBed == null) {
LOG.error("gene file bed undefined");
return -1;
}
if (this.outputFile == null) {
LOG.error("output file undefined");
return -1;
}
CloseableIterator<VariantContext> iter = null;
try {
final File vcfFile = new File(oneAndOnlyOneFile(args));
this.vcfFileReader = new VCFFileReader(vcfFile, true);
final VCFHeader header = this.vcfFileReader.getFileHeader();
final Pedigree pedigree;
if (this.pedigreeFile != null) {
pedigree = new Pedigree.Parser().parse(this.pedigreeFile);
} else {
pedigree = new Pedigree.Parser().parse(header);
}
if (pedigree == null || pedigree.isEmpty() || !pedigree.hasAffected() || !pedigree.hasUnaffected()) {
LOG.error("empty ped or no case/ctrl");
return -1;
}
pedigree.verifyPersonsHaveUniqueNames();
for (final Pedigree.Person p : pedigree.getPersons().stream().filter(P -> P.isAffected() || P.isUnaffected()).filter(P -> header.getSampleNamesInOrder().contains(P.getId())).collect(Collectors.toSet())) {
this.id2samples.put(p.getId(), p);
}
this.vcfTools = new VcfTools(header);
List<Interval> geneList;
if (!this.geneBed.exists()) {
final Map<String, Interval> gene2interval = new HashMap<>(50000);
LOG.info("building gene file" + this.geneBed);
iter = this.vcfFileReader.iterator();
// iter = this.vcfFileReader.query("chr3",1,300_000_000);
final SAMSequenceDictionaryProgress progress = new SAMSequenceDictionaryProgress(header).logger(LOG);
while (iter.hasNext()) {
final VariantContext ctx = progress.watch(iter.next());
if (!accept(ctx))
continue;
for (final String geneName : getGenes(ctx)) {
final Interval old = gene2interval.get(geneName);
if (old == null) {
gene2interval.put(geneName, new Interval(ctx.getContig(), ctx.getStart(), ctx.getEnd(), false, geneName));
LOG.info("adding " + geneName + ". number of genes: " + gene2interval.size());
} else if (!old.getContig().equals(ctx.getContig())) {
LOG.error("boum :" + geneName + ": on chrom " + ctx.getContig() + " vs " + old);
return -1;
} else {
gene2interval.put(geneName, new Interval(ctx.getContig(), Math.min(ctx.getStart(), old.getStart()), Math.max(ctx.getEnd(), old.getEnd()), false, geneName));
}
}
}
iter.close();
iter = null;
progress.finish();
geneList = new ArrayList<>(gene2interval.values());
PrintWriter pw = new PrintWriter(this.geneBed);
for (final Interval g : geneList) {
pw.println(g.getContig() + "\t" + (g.getStart() - 1) + "\t" + (g.getEnd()) + "\t" + g.getName());
}
pw.flush();
pw.close();
pw = null;
} else {
BedLineCodec codec = new BedLineCodec();
BufferedReader r = IOUtil.openFileForBufferedReading(geneBed);
geneList = r.lines().map(L -> codec.decode(L)).filter(B -> B != null).map(B -> new Interval(B.getContig(), B.getStart(), B.getEnd(), true, B.get(3))).collect(Collectors.toList());
r.close();
}
if (geneList.isEmpty()) {
LOG.error("gene List is empty");
return -1;
}
final Comparator<VariantContext> ctxSorter = VCFUtils.createTidPosRefComparator(header.getSequenceDictionary());
final Function<Interval, List<VariantContext>> loadVariants = (R) -> {
List<VariantContext> L = new ArrayList<>();
CloseableIterator<VariantContext> r = this.vcfFileReader.query(R.getContig(), R.getStart(), R.getEnd());
while (r.hasNext()) {
final VariantContext ctx = r.next();
if (!accept(ctx))
continue;
if (!getGenes(ctx).contains(R.getName()))
continue;
L.add(ctx);
}
r.close();
return L;
};
final SkatExecutor executor = this.skatFactory.build();
Double bestSkat = null;
LOG.info("number of genes : " + geneList.size());
final int list_end_index = (this.user_end_index < 0 ? geneList.size() : Math.min(geneList.size(), this.user_end_index));
for (int x = this.user_begin_index; x < list_end_index; ++x) {
final Interval intervalx = geneList.get(x);
final List<VariantContext> variantsx = loadVariants.apply(intervalx);
if (variantsx.isEmpty())
continue;
for (int y = x; y < geneList.size() /* not list_end_index */; ++y) {
final Interval intervaly;
final List<VariantContext> merge;
if (y == x) {
// we're testing gene x alone
intervaly = intervalx;
merge = variantsx;
} else {
intervaly = geneList.get(y);
if (intervaly.intersects(intervalx))
continue;
final List<VariantContext> variantsy = loadVariants.apply(intervaly);
if (variantsy.isEmpty())
continue;
merge = new MergedList<>(variantsx, variantsy);
}
LOG.info("testing : [" + x + "]" + intervalx + " [" + y + "]" + intervaly + " N:" + geneList.size() + " best: " + bestSkat);
final Double skat = eval(executor, merge);
if (skat == null)
continue;
if (bestSkat == null || skat.compareTo(bestSkat) < 0) {
bestSkat = skat;
LOG.info("best " + bestSkat + " " + intervalx + " " + intervaly);
if (this.outputFile.getName().endsWith(".vcf") || this.outputFile.getName().endsWith(".vcf.gz")) {
final VCFHeader header2 = new VCFHeader(header);
header2.addMetaDataLine(new VCFHeaderLine(VcfGeneEpistasis.class.getName(), intervalx.getName() + " " + intervaly.getName() + " " + bestSkat));
final VariantContextWriter w = VCFUtils.createVariantContextWriter(outputFile);
w.writeHeader(header2);
merge.stream().sorted(ctxSorter).forEach(V -> w.add(V));
w.close();
} else {
final PrintWriter w = super.openFileOrStdoutAsPrintWriter(outputFile);
w.println(String.valueOf(bestSkat) + "\t" + intervalx.getName() + "\t" + intervaly.getName());
w.flush();
w.close();
}
}
}
}
return 0;
} catch (final Exception err) {
LOG.error(err);
return -1;
} finally {
CloserUtil.close(iter);
CloserUtil.close(this.vcfFileReader);
}
}
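The helpers accept(ctx) and getGenes(ctx) are not part of this excerpt. Below is a minimal sketch of how getGenes could be written on top of the VcfTools instance built above, assuming the SnpEff/ANN parser accessors that appear in the VcfLoopOverGenes excerpt further down (getAnnPredictionParser().getPredictions(...), getGeneName()); the actual method in VcfGeneEpistasis may differ:
private Set<String> getGenes(final VariantContext ctx) {
    // hypothetical sketch: collect the SnpEff/ANN gene symbols attached to this variant
    final Set<String> genes = new HashSet<>();
    for (final AnnPredictionParser.AnnPrediction pred : this.vcfTools.getAnnPredictionParser().getPredictions(ctx)) {
        if (!StringUtil.isBlank(pred.getGeneName())) {
            genes.add(pred.getGeneName());
        }
    }
    return genes;
}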
Use of com.github.lindenb.jvarkit.util.vcf.VcfTools in project jvarkit by lindenb.
The class GroupByGene, method read:
private void read(final String input) throws IOException {
LineIterator lineiter = null;
SortingCollection<Call> sortingCollection = null;
try {
final Pattern regexType = (StringUtil.isBlank(this.typeRegexExclude) ? null : Pattern.compile(this.typeRegexExclude));
lineiter = (input == null ? IOUtils.openStreamForLineIterator(stdin()) : IOUtils.openURIForLineIterator(input));
sortingCollection = SortingCollection.newInstance(Call.class, new CallCodec(), (C1, C2) -> {
int i = C1.compareTo(C2);
if (i != 0)
return i;
return C1.line.compareTo(C2.line);
}, this.writingSortingCollection.getMaxRecordsInRam(), this.writingSortingCollection.getTmpPaths());
sortingCollection.setDestructiveIteration(true);
final VCFUtils.CodecAndHeader cah = VCFUtils.parseHeader(lineiter);
final VCFHeader header = cah.header;
this.the_dictionary = header.getSequenceDictionary();
if (this.the_dictionary == null || this.the_dictionary.isEmpty()) {
throw new JvarkitException.DictionaryMissing(input);
}
this.the_codec = cah.codec;
final List<String> sampleNames;
if (header.getSampleNamesInOrder() != null) {
sampleNames = header.getSampleNamesInOrder();
} else {
sampleNames = Collections.emptyList();
}
final VcfTools vcfTools = new VcfTools(header);
final Pedigree pedigree;
if (this.pedigreeFile != null) {
pedigree = Pedigree.newParser().parse(this.pedigreeFile);
} else {
pedigree = Pedigree.newParser().parse(header);
}
final Pattern tab = Pattern.compile("[\t]");
final SAMSequenceDictionaryProgress progress = new SAMSequenceDictionaryProgress(the_dictionary).logger(LOG);
while (lineiter.hasNext()) {
String line = lineiter.next();
final VariantContext ctx = progress.watch(this.the_codec.decode(line));
if (!ctx.isVariant())
continue;
if (ignore_filtered && ctx.isFiltered())
continue;
// simplify line
final String[] tokens = tab.split(line);
// ID
tokens[2] = VCFConstants.EMPTY_ID_FIELD;
// QUAL
tokens[5] = VCFConstants.MISSING_VALUE_v4;
// FILTER
tokens[6] = VCFConstants.UNFILTERED;
// INFO
tokens[7] = VCFConstants.EMPTY_INFO_FIELD;
line = String.join(VCFConstants.FIELD_SEPARATOR, Arrays.asList(tokens));
for (final GeneName g : getGenes(vcfTools, ctx)) {
if (regexType != null && regexType.matcher(g.type).matches())
continue;
final Call c = new Call();
c.line = line;
c.gene = g;
sortingCollection.add(c);
}
}
CloserUtil.close(lineiter);
lineiter = null;
sortingCollection.doneAdding();
/** dump */
final Set<String> casesSamples = pedigree.getPersons().stream().filter(P -> P.isAffected()).map(P -> P.getId()).filter(ID -> sampleNames.contains(ID)).collect(Collectors.toSet());
final Set<String> controlsSamples = pedigree.getPersons().stream().filter(P -> P.isUnaffected()).map(P -> P.getId()).filter(ID -> sampleNames.contains(ID)).collect(Collectors.toSet());
final Set<String> maleSamples = pedigree.getPersons().stream().filter(P -> P.isMale()).map(P -> P.getId()).filter(ID -> sampleNames.contains(ID)).collect(Collectors.toSet());
final Set<String> femaleSamples = pedigree.getPersons().stream().filter(P -> P.isFemale()).map(P -> P.getId()).filter(ID -> sampleNames.contains(ID)).collect(Collectors.toSet());
final Predicate<Genotype> genotypeFilter = genotype -> {
if (!genotype.isAvailable())
return false;
if (!genotype.isCalled())
return false;
if (genotype.isNoCall())
return false;
if (genotype.isHomRef())
return false;
if (this.ignore_filtered_genotype && genotype.isFiltered())
return false;
return true;
};
PrintStream pw = openFileOrStdoutAsPrintStream(this.outFile);
pw.print("#chrom");
pw.print('\t');
pw.print("min.POS");
pw.print('\t');
pw.print("max.POS");
pw.print('\t');
pw.print("gene.name");
pw.print('\t');
pw.print("gene.type");
pw.print('\t');
pw.print("samples.affected");
pw.print('\t');
pw.print("count.variations");
if (!casesSamples.isEmpty()) {
pw.print('\t');
pw.print("pedigree.cases");
}
if (!controlsSamples.isEmpty()) {
pw.print('\t');
pw.print("pedigree.controls");
}
if (!maleSamples.isEmpty()) {
pw.print('\t');
pw.print("pedigree.males");
}
if (!femaleSamples.isEmpty()) {
pw.print('\t');
pw.print("pedigree.females");
}
if (this.print_fisher && !controlsSamples.isEmpty() && !casesSamples.isEmpty()) {
pw.print('\t');
pw.print("fisher");
}
for (final String sample : sampleNames) {
pw.print('\t');
pw.print(sample);
}
pw.println();
final CloseableIterator<Call> iter = sortingCollection.iterator();
final EqualRangeIterator<Call> eqiter = new EqualRangeIterator<>(iter, (C1, C2) -> C1.compareTo(C2));
while (eqiter.hasNext()) {
final List<Call> row = eqiter.next();
final Call first = row.get(0);
final List<VariantContext> variantList = row.stream().map(R -> GroupByGene.this.the_codec.decode(R.line)).collect(Collectors.toList());
final int minPos = variantList.stream().mapToInt(R -> R.getStart()).min().getAsInt();
final int maxPos = variantList.stream().mapToInt(R -> R.getEnd()).max().getAsInt();
final Set<String> sampleCarryingMut = new HashSet<String>();
final Counter<String> pedCasesCarryingMut = new Counter<String>();
final Counter<String> pedCtrlsCarryingMut = new Counter<String>();
final Counter<String> malesCarryingMut = new Counter<String>();
final Counter<String> femalesCarryingMut = new Counter<String>();
final Counter<String> sample2count = new Counter<String>();
for (final VariantContext ctx : variantList) {
for (final Genotype genotype : ctx.getGenotypes()) {
if (!genotypeFilter.test(genotype))
continue;
final String sampleName = genotype.getSampleName();
sample2count.incr(sampleName);
sampleCarryingMut.add(sampleName);
if (casesSamples.contains(sampleName)) {
pedCasesCarryingMut.incr(sampleName);
}
if (controlsSamples.contains(sampleName)) {
pedCtrlsCarryingMut.incr(sampleName);
}
if (maleSamples.contains(sampleName)) {
malesCarryingMut.incr(sampleName);
}
if (femaleSamples.contains(sampleName)) {
femalesCarryingMut.incr(sampleName);
}
}
}
pw.print(first.getContig());
pw.print('\t');
// convert to bed
pw.print(minPos - 1);
pw.print('\t');
pw.print(maxPos);
pw.print('\t');
pw.print(first.gene.name);
pw.print('\t');
pw.print(first.gene.type);
pw.print('\t');
pw.print(sampleCarryingMut.size());
pw.print('\t');
pw.print(variantList.size());
if (!casesSamples.isEmpty()) {
pw.print('\t');
pw.print(pedCasesCarryingMut.getCountCategories());
}
if (!controlsSamples.isEmpty()) {
pw.print('\t');
pw.print(pedCtrlsCarryingMut.getCountCategories());
}
if (!maleSamples.isEmpty()) {
pw.print('\t');
pw.print(malesCarryingMut.getCountCategories());
}
if (!femaleSamples.isEmpty()) {
pw.print('\t');
pw.print(femalesCarryingMut.getCountCategories());
}
if (this.print_fisher && !controlsSamples.isEmpty() && !casesSamples.isEmpty()) {
int count_case_mut = 0;
int count_ctrl_mut = 0;
int count_case_wild = 0;
int count_ctrl_wild = 0;
for (final VariantContext ctx : variantList) {
for (final Genotype genotype : ctx.getGenotypes()) {
final String sampleName = genotype.getSampleName();
final boolean has_mutation = genotypeFilter.test(genotype);
if (controlsSamples.contains(sampleName)) {
if (has_mutation) {
count_ctrl_mut++;
} else {
count_ctrl_wild++;
}
} else if (casesSamples.contains(sampleName)) {
if (has_mutation) {
count_case_mut++;
} else {
count_case_wild++;
}
}
}
}
final FisherExactTest fisher = FisherExactTest.compute(count_case_mut, count_case_wild, count_ctrl_mut, count_ctrl_wild);
pw.print('\t');
pw.print(fisher.getAsDouble());
}
for (final String sample : sampleNames) {
pw.print('\t');
pw.print(sample2count.count(sample));
}
pw.println();
if (pw.checkError())
break;
}
eqiter.close();
iter.close();
pw.flush();
if (this.outFile != null)
pw.close();
} finally {
CloserUtil.close(lineiter);
if (sortingCollection != null)
sortingCollection.cleanup();
}
}
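The getGenes(vcfTools, ctx) call above yields GeneName objects carrying a name and a type (used by the typeRegexExclude filter and the gene.name/gene.type columns). A hedged sketch of such a helper, combining the ANN and VEP parsers exposed by VcfTools; the GeneName constructor and the type labels used here are assumptions, not the project's actual API:
private Set<GeneName> getGenes(final VcfTools tools, final VariantContext ctx) {
    final Set<GeneName> genes = new HashSet<>();
    // SnpEff / ANN predictions
    for (final AnnPredictionParser.AnnPrediction p : tools.getAnnPredictionParser().getPredictions(ctx)) {
        if (!StringUtil.isBlank(p.getGeneName())) genes.add(new GeneName(p.getGeneName(), "ann.gene"));
    }
    // VEP predictions
    for (final VepPredictionParser.VepPrediction p : tools.getVepPredictionParser().getPredictions(ctx)) {
        if (!StringUtil.isBlank(p.getSymbol())) genes.add(new GeneName(p.getSymbol(), "vep.symbol"));
    }
    return genes;
}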
Use of com.github.lindenb.jvarkit.util.vcf.VcfTools in project jvarkit by lindenb.
The class VCFFilterJS, method doVcfToVcf:
@Override
protected int doVcfToVcf(final String inputName, final VcfIterator r, final VariantContextWriter w) {
try {
final VCFHeader header = r.getHeader();
final VcfTools vcfTools = new VcfTools(header);
final VCFHeader h2 = new VCFHeader(header);
addMetaData(h2);
final Pedigree pedigree;
if (pedigreeFile != null) {
pedigree = Pedigree.newParser().parse(this.pedigreeFile);
} else { // try to read the pedigree from the VCF header
Pedigree p = null;
try {
p = Pedigree.newParser().parse(header);
} catch (final Exception err) {
LOG.warn("cannot decode pedigree from vcf header");
p = Pedigree.createEmptyPedigree();
}
pedigree = p;
}
final VCFFilterHeaderLine filterHeaderLine = (filteredTag.trim().isEmpty() ? null : new VCFFilterHeaderLine(this.filteredTag.trim(), "Filtered with " + getProgramName()));
if (filterHeaderLine != null)
h2.addMetaDataLine(filterHeaderLine);
final SAMSequenceDictionaryProgress progress = new SAMSequenceDictionaryProgress(header).logger(LOG);
final Bindings bindings = this.compiledScript.getEngine().createBindings();
bindings.put("header", header);
bindings.put("tools", vcfTools);
bindings.put("pedigree", pedigree);
bindings.put("individuals", Collections.unmodifiableList(pedigree.getPersons().stream().filter(P -> (P.isAffected() || P.isUnaffected())).filter(P -> P.hasUniqId()).filter(P -> header.getSampleNamesInOrder().contains(P.getId())).collect(Collectors.toList())));
for (final String jsonkv : this.jsonFiles) {
int eq = jsonkv.indexOf("=");
if (eq <= 0)
throw new JvarkitException.UserError("Bad format for json . expected key=/path/to/file.json but got '" + jsonkv + "'");
final String key = jsonkv.substring(0, eq);
final FileReader jsonFile = new FileReader(jsonkv.substring(eq + 1));
JsonParser jsonParser = new JsonParser();
final JsonElement root = jsonParser.parse(jsonFile);
jsonFile.close();
bindings.put(key, root);
}
w.writeHeader(h2);
while (r.hasNext() && !w.checkError()) {
final VariantContext variation = progress.watch(r.next());
bindings.put("variant", variation);
final Object result = compiledScript.eval(bindings);
// result is an array or a collection of variants
if (result != null && (result.getClass().isArray() || (result instanceof Collection))) {
final Collection<?> col;
if (result.getClass().isArray()) {
final Object[] array = (Object[]) result;
col = Arrays.asList(array);
} else {
col = (Collection<?>) result;
}
// write all the variants
for (final Object item : col) {
if (item == null)
throw new JvarkitException.UserError("item in array is null");
if (!(item instanceof VariantContext))
throw new JvarkitException.UserError("item in array is not a VariantContext " + item.getClass());
w.add(VariantContext.class.cast(item));
}
} else // result is a VariantContext
if (result != null && (result instanceof VariantContext)) {
w.add(VariantContext.class.cast(result));
} else {
boolean accept = true;
if (result == null) {
accept = false;
} else if (result instanceof Boolean) {
if (Boolean.FALSE.equals(result))
accept = false;
} else if (result instanceof Number) {
if (((Number) result).intValue() != 1)
accept = false;
} else {
LOG.warn("Script returned something that is not a boolean or a number:" + result.getClass());
accept = false;
}
if (!accept) {
if (filterHeaderLine != null) {
final VariantContextBuilder vcb = new VariantContextBuilder(variation);
vcb.filter(filterHeaderLine.getID());
w.add(vcb.make());
}
continue;
}
// set PASS filter if needed
if (filterHeaderLine != null && !variation.isFiltered()) {
w.add(new VariantContextBuilder(variation).passFilters().make());
continue;
}
w.add(variation);
}
}
return RETURN_OK;
} catch (final Exception err) {
LOG.error(err);
return -1;
} finally {
}
}
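The script bound above receives header, tools (the VcfTools instance), pedigree, individuals and, for each record, variant; it must return a boolean, a number, a VariantContext, or an array/collection of VariantContext. A minimal sketch of driving such a compiled script through the standard javax.script API; the inline JavaScript expression and the free variables header and ctx are only illustrative, not part of jvarkit, and the fragment is assumed to run inside a method declaring throws ScriptException:
import javax.script.Bindings;
import javax.script.Compilable;
import javax.script.CompiledScript;
import javax.script.ScriptEngine;
import javax.script.ScriptEngineManager;

final ScriptEngine engine = new ScriptEngineManager().getEngineByName("nashorn");
// trivial filter expression: keep variants with QUAL > 30; 'variant' and 'tools' mirror the bindings used by VCFFilterJS
final CompiledScript script = ((Compilable) engine).compile("variant.getPhredScaledQual() > 30.0");
final Bindings bindings = engine.createBindings();
bindings.put("tools", new VcfTools(header)); // 'header' is an existing VCFHeader (assumption)
bindings.put("variant", ctx); // 'ctx' is the current VariantContext (assumption)
final Object result = script.eval(bindings); // a Boolean here; numbers, VariantContext or collections are also accepted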
Use of com.github.lindenb.jvarkit.util.vcf.VcfTools in project jvarkit by lindenb.
The class KnimeVariantHelper, method forEachVariantData:
public Stream<VariantData> forEachVariantData(final String vcfFile) throws IOException {
final File file = new File(vcfFile);
IOUtil.assertFileIsReadable(file);
final VCFFileReader r = new VCFFileReader(file, false);
final VCFHeader header = r.getFileHeader();
this.init(header);
final VcfTools vcfTools = new VcfTools(header);
final CloseableIterator<VariantContext> iter = r.iterator();
final Iterable<VariantContext> iterable = () -> iter;
return StreamSupport.stream(iterable.spliterator(), false).onClose(() -> {
CloserUtil.close(iter);
CloserUtil.close(r);
}).map(V -> new VariantData(V) {
@Override
public VCFHeader getVcfHeader() {
return header;
}
@Override
public VcfTools getVcfTools() {
return vcfTools;
}
});
}
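A short usage sketch for this helper; the KnimeVariantHelper instance and the input path are illustrative. Closing the returned stream matters, since the onClose handler is what releases the underlying iterator and VCFFileReader:
// 'helper' is an already configured KnimeVariantHelper (assumption)
try (final Stream<VariantData> stream = helper.forEachVariantData("/path/to/input.vcf.gz")) {
    final long n = stream.count();
    System.out.println("variants seen: " + n);
}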
Use of com.github.lindenb.jvarkit.util.vcf.VcfTools in project jvarkit by lindenb.
The class VcfLoopOverGenes, method doWork:
@SuppressWarnings("resource")
@Override
public int doWork(final List<String> args) {
PrintWriter pw = null;
VCFFileReader vcfFileReader = null;
CloseableIterator<VariantContext> iter = null;
CloseableIterator<GeneLoc> iter2 = null;
BufferedReader br = null;
ArchiveFactory archive = null;
try {
final File vcf = new File(oneAndOnlyOneFile(args));
vcfFileReader = new VCFFileReader(vcf, (this.geneFile != null || !StringUtil.isBlank(this.regionStr)));
this.dictionary = vcfFileReader.getFileHeader().getSequenceDictionary();
if (this.dictionary == null) {
throw new JvarkitException.VcfDictionaryMissing(vcf);
}
final VcfTools tools = new VcfTools(vcfFileReader.getFileHeader());
if (!this.prefix.isEmpty() && !this.prefix.endsWith(".")) {
this.prefix += ".";
}
if (this.geneFile == null) {
final SortingCollection<GeneLoc> sortingCollection = SortingCollection.newInstance(GeneLoc.class, new GeneLocCodec(), (A, B) -> A.compareTo(B), this.writingSortingCollection.getMaxRecordsInRam(), this.writingSortingCollection.getTmpPaths());
sortingCollection.setDestructiveIteration(true);
if (StringUtil.isBlank(this.regionStr)) {
iter = vcfFileReader.iterator();
} else {
final IntervalParser parser = new IntervalParser(this.dictionary);
parser.setContigNameIsWholeContig(true);
final Interval interval = parser.parse(this.regionStr);
if (interval == null) {
LOG.error("Cannot parse interval " + this.regionStr);
return -1;
}
iter = vcfFileReader.query(interval.getContig(), interval.getStart(), interval.getEnd());
}
final SAMSequenceDictionaryProgress progress = new SAMSequenceDictionaryProgress(vcfFileReader.getFileHeader()).logger(LOG);
if (this.splitMethod.equals(SplitMethod.Annotations)) {
while (iter.hasNext()) {
final VariantContext ctx = progress.watch(iter.next());
for (final AnnPredictionParser.AnnPrediction pred : tools.getAnnPredictionParser().getPredictions(ctx)) {
if (this.snpEffNoIntergenic && pred.isIntergenicRegion()) {
continue;
}
if (!StringUtil.isBlank(pred.getGeneName())) {
sortingCollection.add(create(ctx, pred.getGeneName(), SourceType.ANN_GeneName));
}
if (!StringUtil.isBlank(pred.getGeneId())) {
sortingCollection.add(create(ctx, pred.getGeneId(), SourceType.ANN_GeneID));
}
if (!StringUtil.isBlank(pred.getFeatureId())) {
sortingCollection.add(create(ctx, pred.getFeatureId(), SourceType.ANN_FeatureID));
}
}
for (final VepPredictionParser.VepPrediction pred : tools.getVepPredictionParser().getPredictions(ctx)) {
if (!StringUtil.isBlank(pred.getGene())) {
sortingCollection.add(create(ctx, pred.getGene(), SourceType.VEP_Gene));
}
if (!StringUtil.isBlank(pred.getFeature())) {
sortingCollection.add(create(ctx, pred.getFeature(), SourceType.VEP_Feature));
}
if (!StringUtil.isBlank(pred.getSymbol())) {
sortingCollection.add(create(ctx, pred.getSymbol(), SourceType.VEP_Symbol));
}
if (!StringUtil.isBlank(pred.getHgncId())) {
sortingCollection.add(create(ctx, pred.getHgncId(), SourceType.VEP_HgncId));
}
}
}
} else /* split the VCF per sliding window of variants */ if (this.splitMethod.equals(SplitMethod.VariantSlidingWindow)) {
if (this.variantsWinCount < 1) {
LOG.error("Bad value for variantsWinCount");
return -1;
}
if (this.variantsWinShift < 1 || this.variantsWinShift > this.variantsWinCount) {
LOG.error("Bad value for variantsWinShift");
return -1;
}
final List<VariantContext> buffer = new ArrayList<>(this.variantsWinCount);
/** routine to dump the buffer into the sorting collection */
final Runnable dumpBuffer = () -> {
if (buffer.isEmpty())
return;
final String contig = buffer.get(0).getContig();
final int chromStart = buffer.stream().mapToInt(CTX -> CTX.getStart()).min().getAsInt();
// use the last (max) START as the window end
final int chromEnd0 = buffer.stream().mapToInt(CTX -> CTX.getStart()).max().getAsInt();
// final int chromEnd1 = buffer.stream().mapToInt(CTX->CTX.getEnd()).max().getAsInt();
final String identifier = contig + "_" + String.format(NUM_FORMAT, chromStart) + "_" + String.format(NUM_FORMAT, chromEnd0);
for (final VariantContext ctx : buffer) {
sortingCollection.add(create(ctx, identifier, SourceType.SlidingVariants));
}
};
while (iter.hasNext()) {
VariantContext ctx = progress.watch(iter.next());
/* reduce the memory footprint for this context */
ctx = new VariantContextBuilder(ctx).genotypes(Collections.emptyList()).unfiltered().rmAttributes(new ArrayList<>(ctx.getAttributes().keySet())).make();
if (!buffer.isEmpty() && !buffer.get(0).getContig().equals(ctx.getContig())) {
dumpBuffer.run();
buffer.clear();
}
buffer.add(ctx);
if (buffer.size() >= this.variantsWinCount) {
dumpBuffer.run();
final int fromIndex = Math.min(this.variantsWinShift, buffer.size());
buffer.subList(0, fromIndex).clear();
}
}
dumpBuffer.run();
buffer.clear();
} else if (this.splitMethod.equals(SplitMethod.ContigSlidingWindow)) {
if (this.contigWinLength < 1) {
LOG.error("Bad value for contigWinCount");
return -1;
}
if (this.contigWinShift < 1 || this.contigWinShift > this.contigWinLength) {
LOG.error("Bad value for contigWinShift");
return -1;
}
while (iter.hasNext()) {
VariantContext ctx = progress.watch(iter.next());
/* reduce the memory footprint for this context */
ctx = new VariantContextBuilder(ctx).genotypes(Collections.emptyList()).unfiltered().rmAttributes(new ArrayList<>(ctx.getAttributes().keySet())).make();
int start = 0;
while (start <= ctx.getStart()) {
if (start + this.contigWinLength >= ctx.getStart()) {
final int chromStart = start;
final int chromEnd0 = start + this.contigWinLength;
final String identifier = ctx.getContig() + "_" + String.format(NUM_FORMAT, chromStart) + "_" + String.format(NUM_FORMAT, chromEnd0);
sortingCollection.add(create(ctx, identifier, SourceType.SlidingContig));
}
start += this.contigWinShift;
}
}
} else {
throw new IllegalStateException("No such method: " + this.splitMethod);
}
sortingCollection.doneAdding();
progress.finish();
iter.close();
iter = null;
pw = super.openFileOrStdoutAsPrintWriter(this.outputFile);
iter2 = sortingCollection.iterator();
final EqualRangeIterator<GeneLoc> eqiter = new EqualRangeIterator<>(iter2, this.compareGeneName);
int geneIdentifierId = 0;
while (eqiter.hasNext()) {
final List<GeneLoc> gene = eqiter.next();
pw.print(gene.get(0).contig);
pw.print('\t');
// -1 for BED
pw.print(gene.stream().mapToInt(G -> G.start).min().getAsInt() - 1);
pw.print('\t');
pw.print(gene.stream().mapToInt(G -> G.end).max().getAsInt());
pw.print('\t');
pw.print(this.prefix + String.format("%09d", ++geneIdentifierId));
pw.print('\t');
pw.print(gene.get(0).geneName);
pw.print('\t');
pw.print(gene.get(0).sourceType);
pw.print('\t');
pw.print(gene.size());
pw.println();
}
pw.flush();
pw.close();
pw = null;
eqiter.close();
iter2.close();
iter2 = null;
sortingCollection.cleanup();
} else {
if (this.nJobs < 1) {
this.nJobs = Math.max(1, Runtime.getRuntime().availableProcessors());
LOG.info("setting njobs to " + this.nJobs);
}
final ExecutorService executorService;
final List<Future<Integer>> futureResults;
if (this.nJobs > 1) {
executorService = new ThreadPoolExecutor(this.nJobs, this.nJobs, 0L, TimeUnit.MILLISECONDS, new LinkedBlockingQueue<Runnable>());
futureResults = new ArrayList<>();
} else {
executorService = null;
futureResults = Collections.emptyList();
}
if (this.outputFile == null) {
LOG.error("When scanning a VCF with " + this.geneFile + ". Output file must be defined");
}
if (!this.exec.isEmpty()) {
if (this.outputFile.getName().endsWith(".zip")) {
LOG.error("Cannot execute " + this.exec + " when saving to a zip.");
return -1;
}
}
archive = ArchiveFactory.open(this.outputFile);
// when every per-gene VCF will be deleted after the command, no manifest is needed
PrintWriter manifest = this.deleteAfterCommand && !this.exec.isEmpty() ? new PrintWriter(new NullOuputStream()) : archive.openWriter(this.prefix + "manifest.txt");
br = IOUtils.openFileForBufferedReading(this.geneFile);
final BedLineCodec bedCodec = new BedLineCodec();
for (; ; ) {
if (!futureResults.isEmpty()) {
int i = 0;
while (i < futureResults.size()) {
final Future<Integer> r = futureResults.get(i);
if (r.isCancelled()) {
LOG.error("Task was canceled. Break.");
return -1;
} else if (r.isDone()) {
futureResults.remove(i);
int rez = r.get();
if (rez != 0) {
LOG.error("Task Failed (" + rez + "). Break");
}
} else {
i++;
}
}
}
final String line = br.readLine();
if (line == null)
break;
if (line.startsWith("#") || line.isEmpty())
continue;
final BedLine bedLine = bedCodec.decode(line);
if (bedLine == null)
continue;
// ID
final String geneIdentifier = bedLine.get(3);
// name
final String geneName = bedLine.get(4);
final SourceType sourceType = SourceType.valueOf(bedLine.get(5));
final String filename = geneIdentifier;
final String outputVcfName = (filename.startsWith(this.prefix) ? "" : this.prefix) + filename + ".vcf" + (this.compress ? ".gz" : "");
LOG.info(bedLine.getContig() + ":" + bedLine.getStart() + "-" + bedLine.getEnd() + " length :" + (bedLine.getEnd() - bedLine.getStart()));
if (bedLine.getEnd() - bedLine.getStart() > 1E6) {
LOG.warn("That's a large region ! " + bedLine);
}
OutputStream vcfOutputStream = null;
VariantContextWriter vw = null;
int countVariants = 0;
final SAMSequenceDictionaryProgress progress = new SAMSequenceDictionaryProgress(vcfFileReader.getFileHeader()).logger(LOG).prefix(geneName + " " + bedLine.getContig() + ":" + bedLine.getStart() + "-" + bedLine.getEnd());
iter = vcfFileReader.query(bedLine.getContig(), bedLine.getStart(), bedLine.getEnd());
while (iter.hasNext()) {
VariantContext ctx = progress.watch(iter.next());
switch(sourceType) {
case SlidingVariants:
{
// nothing
break;
}
case SlidingContig:
{
// nothing
break;
}
case ANN_GeneName:
case ANN_FeatureID:
case ANN_GeneID:
{
final List<String> preds = new ArrayList<>();
for (final AnnPredictionParser.AnnPrediction pred : tools.getAnnPredictionParser().getPredictions(ctx)) {
final String predictionIdentifier;
switch(sourceType) {
case ANN_GeneName:
predictionIdentifier = pred.getGeneName();
break;
case ANN_FeatureID:
predictionIdentifier = pred.getFeatureId();
break;
case ANN_GeneID:
predictionIdentifier = pred.getGeneId();
break;
default:
throw new IllegalStateException(bedLine.toString());
}
if (StringUtil.isBlank(predictionIdentifier))
continue;
if (!geneName.equals(predictionIdentifier))
continue;
preds.add(pred.getOriginalAttributeAsString());
}
if (preds.isEmpty()) {
ctx = null;
} else {
ctx = new VariantContextBuilder(ctx).rmAttribute(tools.getAnnPredictionParser().getTag()).attribute(tools.getAnnPredictionParser().getTag(), preds).make();
}
break;
}
case VEP_Gene:
case VEP_Feature:
case VEP_Symbol:
case VEP_HgncId:
{
final List<String> preds = new ArrayList<>();
for (final VepPredictionParser.VepPrediction pred : tools.getVepPredictions(ctx)) {
final String predictionIdentifier;
switch(sourceType) {
case VEP_Gene:
predictionIdentifier = pred.getGene();
break;
case VEP_Feature:
predictionIdentifier = pred.getFeature();
break;
case VEP_Symbol:
predictionIdentifier = pred.getSymbol();
break;
case VEP_HgncId:
predictionIdentifier = pred.getHgncId();
break;
default:
throw new IllegalStateException(bedLine.toString());
}
if (StringUtil.isBlank(predictionIdentifier))
continue;
if (!geneName.equals(predictionIdentifier))
continue;
preds.add(pred.getOriginalAttributeAsString());
}
if (preds.isEmpty()) {
ctx = null;
} else {
ctx = new VariantContextBuilder(ctx).rmAttribute(tools.getVepPredictionParser().getTag()).attribute(tools.getVepPredictionParser().getTag(), preds).make();
}
break;
}
default:
throw new IllegalStateException(bedLine.toString());
}
if (ctx == null)
continue;
if (vcfOutputStream == null) {
LOG.info(filename);
manifest.println(outputVcfName);
final VCFHeader header = new VCFHeader(vcfFileReader.getFileHeader());
header.addMetaDataLine(new VCFHeaderLine(VCF_HEADER_SPLITKEY, filename));
vcfOutputStream = archive.openOuputStream(outputVcfName);
vw = VCFUtils.createVariantContextWriterToOutputStream(vcfOutputStream);
vw.writeHeader(header);
}
countVariants++;
vw.add(ctx);
if (countVariants % 1000 == 0) {
LOG.info("Loading : " + geneIdentifier + " N=" + countVariants);
}
}
progress.finish();
LOG.info(geneIdentifier + " N=" + countVariants);
if (vcfOutputStream != null) {
vw.close();
vcfOutputStream.flush();
vcfOutputStream.close();
vw = null;
if (!this.exec.isEmpty()) {
final Callable<Integer> callable = () -> {
final File vcfOutFile = new File(this.outputFile, outputVcfName);
IOUtil.assertFileIsReadable(vcfOutFile);
final String vcfPath = vcfOutFile.getPath();
final StringTokenizer st = new StringTokenizer(this.exec);
final List<String> command = new ArrayList<>(1 + st.countTokens());
while (st.hasMoreTokens()) {
String token = st.nextToken().replaceAll("__PREFIX__", this.prefix).replaceAll("__CONTIG__", bedLine.getContig()).replaceAll("__CHROM__", bedLine.getContig()).replaceAll("__ID__", geneIdentifier).replaceAll("__NAME__", geneName).replaceAll("__START__", String.valueOf(bedLine.getStart())).replaceAll("__END__", String.valueOf(bedLine.getEnd())).replaceAll("__SOURCE__", sourceType.name()).replaceAll("__VCF__", vcfPath);
command.add(token);
}
LOG.info(command.stream().map(S -> "'" + S + "'").collect(Collectors.joining(" ")));
final ProcessBuilder pb = new ProcessBuilder(command);
pb.redirectErrorStream(true);
final Process p = pb.start();
final Thread stdoutThread = new Thread(() -> {
try {
InputStream in = p.getInputStream();
IOUtils.copyTo(in, stdout());
} catch (Exception err) {
LOG.error(err);
}
});
stdoutThread.start();
int exitValue = p.waitFor();
if (exitValue != 0) {
LOG.error("Command failed (" + exitValue + "):" + String.join(" ", command));
return -1;
} else {
if (deleteAfterCommand) {
if (!vcfOutFile.delete()) {
LOG.warn("Cannot delete " + vcfOutFile);
}
}
return 0;
}
};
if (executorService != null) {
final Future<Integer> rez = executorService.submit(callable);
futureResults.add(rez);
} else {
final int ret = callable.call();
if (ret != 0) {
LOG.error("Error with process (" + ret + ")");
return ret;
}
}
}
} else {
manifest.println("#" + filename);
LOG.warn("No Variant Found for " + line);
}
iter.close();
}
if (executorService != null) {
LOG.info("shutdown");
executorService.shutdown();
executorService.awaitTermination(365, TimeUnit.DAYS);
}
br.close();
br = null;
manifest.close();
archive.close();
archive = null;
LOG.info("Done");
}
vcfFileReader.close();
vcfFileReader = null;
return 0;
} catch (Exception e) {
LOG.error(e);
return -1;
} finally {
{
CloserUtil.close(iter2);
CloserUtil.close(iter);
CloserUtil.close(pw);
CloserUtil.close(vcfFileReader);
CloserUtil.close(br);
CloserUtil.close(archive);
}
}
}
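The core per-gene transformation of the second pass can be condensed as follows; this is a sketch of the ANN branch already shown above, with illustrative variable names, keeping only the predictions that match the current gene and rewriting the annotation attribute:
final String tag = tools.getAnnPredictionParser().getTag();
final List<String> keep = new ArrayList<>();
for (final AnnPredictionParser.AnnPrediction pred : tools.getAnnPredictionParser().getPredictions(ctx)) {
    if (geneName.equals(pred.getGeneName())) {
        keep.add(pred.getOriginalAttributeAsString());
    }
}
// drop the variant if it carries no prediction for this gene, otherwise restrict ANN to the matching predictions
final VariantContext slimmed = keep.isEmpty() ? null : new VariantContextBuilder(ctx).rmAttribute(tag).attribute(tag, keep).make();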