use of org.apache.commons.lang3.math.Fraction in project pcgen by PCGen.
the class PCGenTaskExecutor method progressChanged.
@Override
public void progressChanged(PCGenTaskEvent event) {
    if (currentTask.getMaximum() == 0) {
        return;
    }
    Fraction progress = Fraction.getFraction(currentTask.getProgress(), currentTask.getMaximum());
    progress = progress.multiplyBy(progressMultiplier);
    progress = baseProgress.add(progress);
    setValues(currentTask.getMessage(), progress.getNumerator(), progress.getDenominator());
}
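The Fraction arithmetic above composes a sub-task's progress into the overall progress exactly, without floating-point rounding. The following is a minimal standalone sketch, not part of PCGen: the values for baseProgress, progressMultiplier, and the sub-task progress are hypothetical, standing for the work already completed, the sub-task's share of the total, and the sub-task's own completion, as in the handler above.

import org.apache.commons.lang3.math.Fraction;

public class ProgressSketch {
    public static void main(String[] args) {
        final Fraction baseProgress = Fraction.getFraction(2, 5);        // hypothetical: 2/5 of all work already done
        final Fraction progressMultiplier = Fraction.getFraction(1, 5);  // hypothetical: this sub-task owns 1/5 of all work
        final Fraction subTaskProgress = Fraction.getFraction(3, 4);     // hypothetical: sub-task is 3/4 complete

        // Same composition as progressChanged: base + (sub-task progress scaled by its share).
        final Fraction overall = baseProgress.add(subTaskProgress.multiplyBy(progressMultiplier));
        System.out.println(overall.getNumerator() + "/" + overall.getDenominator());   // prints 11/20
    }
}

The numerator/denominator pair can then drive a progress bar directly, exactly as setValues does above.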
use of org.apache.commons.lang3.math.Fraction in project gatk by broadinstitute.
the class PerformJointSegmentation method doWork.
@Override
public Object doWork() {
    ParamUtils.isPositive(initialNumAFStates, "Must have at least one allele-fraction state.");
    ParamUtils.isPositive(initialNumCRStates, "Must have at least one copy-ratio state.");
    final AllelicPanelOfNormals allelicPoN = allelicPoNFile != null ? AllelicPanelOfNormals.read(allelicPoNFile) : AllelicPanelOfNormals.EMPTY_PON;
    final AllelicCountCollection acc = new AllelicCountCollection(snpCountsFile);
    final ReadCountCollection rcc;
    try {
        rcc = ReadCountCollectionUtils.parse(new File(coverageFile));
    } catch (final IOException ex) {
        throw new UserException.BadInput("could not read input file");
    }
    final JointAFCRSegmenter jointSegmenter = JointAFCRSegmenter.createJointSegmenter(initialNumCRStates, rcc, initialNumAFStates, acc, allelicPoN);
    final List<Pair<SimpleInterval, AFCRHiddenState>> segmentation = jointSegmenter.findSegments();
    final List<ACNVModeledSegment> segments = segmentation.stream()
            .map(pair -> new ACNVModeledSegment(pair.getLeft(),
                    errorlessPosterior(pair.getRight().getLog2CopyRatio()),
                    errorlessPosterior(pair.getRight().getMinorAlleleFraction())))
            .collect(Collectors.toList());
    //TODO: make more reasonable output for ACNV 2.0
    SegmentUtils.writeACNVModeledSegmentFile(outputSegmentsFile, segments, new Genome(rcc, acc.getCounts()));
    return "SUCCESS";
}
use of org.apache.commons.lang3.math.Fraction in project gatk-protected by broadinstitute.
the class AllelicPanelOfNormalsCreator method create.
/**
 * Creates an {@link AllelicPanelOfNormals} given a site-frequency threshold;
 * sites appearing in strictly less than this fraction of samples will not be included in the panel of normals.
 * @param siteFrequencyThreshold site-frequency threshold
 * @return an {@link AllelicPanelOfNormals} containing sites at or above the site-frequency threshold
 */
public AllelicPanelOfNormals create(final double siteFrequencyThreshold) {
    logger.info("Creating allelic panel of normals...");
    //used to filter on frequency
    final Map<SimpleInterval, MutableInt> numberOfSamplesMap = new HashMap<>();
    //store only the total counts (smaller memory footprint)
    final Map<SimpleInterval, AllelicCount> totalCountsMap = new HashMap<>();
    int pulldownFileCounter = 1;
    final int totalNumberOfSamples = pulldownFiles.size();
    for (final File pulldownFile : pulldownFiles) {
        logger.info("Processing pulldown file " + pulldownFileCounter++ + "/" + totalNumberOfSamples + " (" + pulldownFile + ")...");
        final AllelicCountCollection pulldownCounts = new AllelicCountCollection(pulldownFile);
        for (final AllelicCount count : pulldownCounts.getCounts()) {
            //update the sum of ref and alt counts at each site
            final SimpleInterval site = count.getInterval();
            final AllelicCount currentCountAtSite = totalCountsMap.getOrDefault(site, new AllelicCount(site, 0, 0));
            final AllelicCount updatedCountAtSite = new AllelicCount(site,
                    currentCountAtSite.getRefReadCount() + count.getRefReadCount(),
                    currentCountAtSite.getAltReadCount() + count.getAltReadCount());
            totalCountsMap.put(site, updatedCountAtSite);
            //update the number of samples seen possessing each site
            final MutableInt numberOfSamplesAtSite = numberOfSamplesMap.get(site);
            if (numberOfSamplesAtSite == null) {
                numberOfSamplesMap.put(site, new MutableInt(1));
            } else {
                numberOfSamplesAtSite.increment();
            }
        }
    }
    logger.info("Total number of unique sites present in samples: " + totalCountsMap.size());
    //filter out sites that appear at a frequency strictly less than the provided threshold
    final AllelicCountCollection totalCounts = new AllelicCountCollection();
    numberOfSamplesMap.entrySet().stream()
            .filter(e -> e.getValue().doubleValue() / totalNumberOfSamples >= siteFrequencyThreshold)
            .map(e -> totalCountsMap.get(e.getKey()))
            .forEach(totalCounts::add);
    logger.info(String.format("Number of unique sites present in samples above site frequency = %4.3f: %d", siteFrequencyThreshold, totalCounts.getCounts().size()));
    return new AllelicPanelOfNormals(totalCounts);
}
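The stream filter at the end implements the site-frequency criterion with plain double division. As a minimal sketch, not part of the GATK code above and using hypothetical counts, the same keep/drop decision can be expressed with commons-lang3 Fraction so the site frequency stays an exact ratio until the comparison:

import org.apache.commons.lang3.math.Fraction;

public class SiteFrequencyFilterSketch {
    public static void main(String[] args) {
        final int samplesWithSite = 7;           // hypothetical: site observed in 7 pulldowns
        final int totalNumberOfSamples = 20;     // hypothetical: 20 normal samples in total
        final double siteFrequencyThreshold = 0.25;

        // Exact site frequency as a reduced fraction (7/20) rather than a double.
        final Fraction siteFrequency = Fraction.getReducedFraction(samplesWithSite, totalNumberOfSamples);

        // Same criterion as the stream filter above: keep the site if its frequency is >= the threshold.
        final boolean keepSite = siteFrequency.doubleValue() >= siteFrequencyThreshold;
        System.out.println(siteFrequency + " >= " + siteFrequencyThreshold + " ? " + keepSite);   // prints 7/20 >= 0.25 ? true
    }
}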
use of org.apache.commons.lang3.math.Fraction in project gatk-protected by broadinstitute.
the class PerformJointSegmentation method doWork.
(The method body is identical to the PerformJointSegmentation.doWork example from project gatk shown above.)
use of org.apache.commons.lang3.math.Fraction in project opencast by opencast.
the class AnalyzeTracksWorkflowOperationHandler method start.
@Override
public WorkflowOperationResult start(WorkflowInstance workflowInstance, JobContext context) throws WorkflowOperationException {
    logger.info("Running analyze-tracks workflow operation on workflow {}", workflowInstance.getId());
    final MediaPackage mediaPackage = workflowInstance.getMediaPackage();
    final String sourceFlavor = getConfig(workflowInstance, OPT_SOURCE_FLAVOR);
    Map<String, String> properties = new HashMap<>();
    final MediaPackageElementFlavor flavor = MediaPackageElementFlavor.parseFlavor(sourceFlavor);
    final Track[] tracks = mediaPackage.getTracks(flavor);
    if (tracks.length <= 0) {
        if (BooleanUtils.toBoolean(getConfig(workflowInstance, OPT_FAIL_NO_TRACK, "false"))) {
            throw new WorkflowOperationException("No matching tracks for flavor " + sourceFlavor);
        }
        logger.info("No tracks with specified flavors ({}) to analyse.", sourceFlavor);
        return createResult(mediaPackage, properties, Action.CONTINUE, 0);
    }
    List<Fraction> aspectRatios = getAspectRatio(getConfig(workflowInstance, OPT_VIDEO_ASPECT, ""));
    for (Track track : tracks) {
        final String varName = toVariableName(track.getFlavor());
        properties.put(varName + "_media", "true");
        properties.put(varName + "_video", Boolean.toString(track.hasVideo()));
        properties.put(varName + "_audio", Boolean.toString(track.hasAudio()));
        // Check resolution
        if (track.hasVideo()) {
            for (VideoStream video : ((TrackImpl) track).getVideo()) {
                // Set resolution variables
                properties.put(varName + "_resolution_x", video.getFrameWidth().toString());
                properties.put(varName + "_resolution_y", video.getFrameHeight().toString());
                Fraction trackAspect = Fraction.getReducedFraction(video.getFrameWidth(), video.getFrameHeight());
                properties.put(varName + "_aspect", trackAspect.toString());
                properties.put(varName + "_framerate", video.getFrameRate().toString());
                // Check if we should fall back to nearest defined aspect ratio
                if (!aspectRatios.isEmpty()) {
                    trackAspect = getNearestAspectRatio(trackAspect, aspectRatios);
                    properties.put(varName + "_aspect_snap", trackAspect.toString());
                }
            }
        }
    }
    logger.info("Finished analyze-tracks workflow operation adding the properties: {}", properties);
    return createResult(mediaPackage, properties, Action.CONTINUE, 0);
}
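The helper getNearestAspectRatio is referenced above but not shown. The following is a self-contained, hypothetical sketch of what such a helper could do with Fraction arithmetic (pick the configured ratio with the smallest absolute distance to the track's ratio); it is not the actual Opencast implementation.

import java.util.List;
import org.apache.commons.lang3.math.Fraction;

public class AspectRatioSketch {

    /** Sketch only: return the candidate ratio closest to the track's ratio (ties keep the earlier candidate). */
    static Fraction getNearestAspectRatio(final Fraction trackAspect, final List<Fraction> candidates) {
        Fraction nearest = candidates.get(0);
        for (final Fraction candidate : candidates) {
            final Fraction currentDistance = nearest.subtract(trackAspect).abs();
            final Fraction candidateDistance = candidate.subtract(trackAspect).abs();
            if (candidateDistance.compareTo(currentDistance) < 0) {
                nearest = candidate;
            }
        }
        return nearest;
    }

    public static void main(String[] args) {
        // 1440x1080 reduces to 4/3 and should snap to 4/3 rather than 16/9.
        final Fraction trackAspect = Fraction.getReducedFraction(1440, 1080);
        final List<Fraction> configured = List.of(Fraction.getFraction(4, 3), Fraction.getFraction(16, 9));
        System.out.println(getNearestAspectRatio(trackAspect, configured));   // prints 4/3
    }
}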