Example usage of org.broadinstitute.hellbender.engine.filters.VariantFilter in the gatk project by broadinstitute.
From the class VariantWalkerBase, method traverse:
/**
 * Implementation of variant-based traversal.
 *
 * <p>Streams the driving variants, applies the walker's variant filter, and invokes
 * {@code apply} once per surviving variant with reads/reference/feature contexts
 * windowed to that variant's interval. Subclasses can override to provide their own
 * behavior, but this default implementation should be suitable for most uses.
 */
@Override
public void traverse() {
    // Renamed from "variantfilter" to follow lowerCamelCase, matching readFilter below.
    final VariantFilter variantFilter = makeVariantFilter();
    final CountingReadFilter readFilter = makeReadFilter();
    // Process each variant in the input stream (sequential stream: order must follow
    // the driving-variants spliterator, and progressMeter updates are not thread-safe here).
    StreamSupport.stream(getSpliteratorForDrivingVariants(), false)
            .filter(variantFilter)
            .forEach(variant -> {
                final SimpleInterval variantInterval = new SimpleInterval(variant);
                apply(variant,
                        new ReadsContext(reads, variantInterval, readFilter),
                        new ReferenceContext(reference, variantInterval),
                        new FeatureContext(features, variantInterval));
                progressMeter.update(variantInterval);
            });
}
Example usage of org.broadinstitute.hellbender.engine.filters.VariantFilter in the gatk project by broadinstitute.
From the class VariantWalkerSpark, method getVariants:
/**
 * Loads variants and the corresponding reads, reference and features into a {@link JavaRDD} for the intervals specified.
 * For the current implementation the reads context will always be empty.
 *
 * If no intervals were specified, returns all the variants.
 *
 * @param ctx the Spark context used to parallelize the variants and broadcast the reference/features
 * @return all variants as a {@link JavaRDD}, bounded by intervals if specified.
 */
public JavaRDD<VariantWalkerContext> getVariants(JavaSparkContext ctx) {
SAMSequenceDictionary sequenceDictionary = getBestAvailableSequenceDictionary();
// Without user-specified intervals, fall back to one interval per contig in the dictionary.
List<SimpleInterval> intervals = hasIntervals() ? getIntervals() : IntervalUtils.getAllIntervalsForReference(sequenceDictionary);
// use unpadded shards (padding is only needed for reference bases)
final List<ShardBoundary> intervalShards = intervals.stream().flatMap(interval -> Shard.divideIntervalIntoShards(interval, variantShardSize, 0, sequenceDictionary).stream()).collect(Collectors.toList());
// NOTE(review): this passes getIntervals() rather than the locally computed `intervals`;
// presumably the source treats a null/empty interval list as "load everything" — confirm.
JavaRDD<VariantContext> variants = variantsSource.getParallelVariantContexts(drivingVariantFile, getIntervals());
VariantFilter variantFilter = makeVariantFilter();
// Adapt the walker's VariantFilter (a Predicate) to Spark's filter via a method reference.
variants = variants.filter(variantFilter::test);
JavaRDD<Shard<VariantContext>> shardedVariants = SparkSharder.shard(ctx, variants, VariantContext.class, sequenceDictionary, intervalShards, variantShardSize, shuffle);
// Broadcast reference and features only when present; nulls signal "absent" downstream.
Broadcast<ReferenceMultiSource> bReferenceSource = hasReference() ? ctx.broadcast(getReference()) : null;
Broadcast<FeatureManager> bFeatureManager = features == null ? null : ctx.broadcast(features);
return shardedVariants.flatMap(getVariantsFunction(bReferenceSource, bFeatureManager, sequenceDictionary, variantShardPadding));
}
Aggregations