Use of org.apache.commons.lang3.concurrent.ConcurrentRuntimeException in project GDSC-SMLM by aherbert:
the class Workflow, method finishWorkers.
@SuppressWarnings("static-method")
private void finishWorkers(ArrayList<RunnableWorker> workers, ArrayList<Thread> threads, boolean now) {
  // Finish work
  for (final RunnableWorker w : workers) {
    w.running = false;
  }

  // Stop or join the threads
  for (int i = 0; i < threads.size(); i++) {
    final Thread t = threads.get(i);
    final RunnableWorker w = workers.get(i);
    if (now) {
      // Stop immediately any running worker
      try {
        t.interrupt();
      } catch (final SecurityException ex) {
        // We should have permission to interrupt this thread.
        throw new ConcurrentRuntimeException("Cannot interrupt!", ex);
      }
    } else {
      // Q. How to check if the worker is sleeping?
      synchronized (w.inbox) {
        w.inbox.notifyAll();
      }
      // Leave the worker to finish its current work
      try {
        t.join(0);
      } catch (final InterruptedException ex) {
        // Unexpected
        ConcurrencyUtils.interruptAndThrowUncheckedIf(true, ex);
      }
    }
  }
}
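A recurring pattern across these snippets is joining a worker thread and converting the checked InterruptedException into an unchecked ConcurrentRuntimeException after restoring the interrupt status. The following is a minimal, self-contained sketch of that pattern (the class name and worker body are illustrative, not taken from GDSC-SMLM; it only requires commons-lang3 on the classpath):

import org.apache.commons.lang3.concurrent.ConcurrentRuntimeException;

public class JoinSketch {
  public static void main(String[] args) {
    final Thread worker = new Thread(() -> {
      // Illustrative work: sleep briefly, restoring the interrupt flag if interrupted.
      try {
        Thread.sleep(100);
      } catch (final InterruptedException ex) {
        Thread.currentThread().interrupt();
      }
    });
    worker.start();
    try {
      // Wait for the worker to finish its current work.
      worker.join();
    } catch (final InterruptedException ex) {
      // Preserve the interrupt status, then convert to an unchecked exception.
      Thread.currentThread().interrupt();
      throw new ConcurrentRuntimeException("Unexpected interruption", ex);
    }
  }
}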
Use of org.apache.commons.lang3.concurrent.ConcurrentRuntimeException in project GDSC-SMLM by aherbert:
the class SeriesImageSource, method closeQueue.
/**
 * Close the background threads.
 */
private synchronized void closeQueue() {
  if (threads != null) {
    // Signal the workers to stop
    for (final BaseWorker worker : workers) {
      worker.run = false;
    }

    // Close the queues. This will wake any thread waiting for them to have capacity.
    if (decodeQueue != null) {
      decodeQueue.close(false);
      readQueue.close(false);
    }
    rawFramesQueue.close(false);

    // Join the threads and then set all to null
    for (final Thread thread : threads) {
      try {
        // This thread will be waiting on imageQueue.put() (which has been cleared)
        // or sourceQueue.take() (which contains shutdown signals), so it should be finished by now
        thread.join();
      } catch (final InterruptedException ex) {
        Thread.currentThread().interrupt();
        throw new ConcurrentRuntimeException("Unexpected interruption", ex);
      }
    }
    threads = null;
    workers = null;
    decodeQueue = null;
    readQueue = null;

    // Do not set this to null as it is used in nextRawFrame()
    rawFramesQueue.close(false);
  }
}
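The decodeQueue, readQueue and rawFramesQueue used above are project-specific closeable queues, so this snippet is not runnable on its own. A rough JDK-only sketch of the same shutdown sequence, assuming a worker that polls a standard BlockingQueue with a timeout and re-checks a volatile run flag (all class and variable names below are illustrative):

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.TimeUnit;

import org.apache.commons.lang3.concurrent.ConcurrentRuntimeException;

public class QueueShutdownSketch {
  static final class PollingWorker implements Runnable {
    volatile boolean run = true;
    final BlockingQueue<String> inbox;

    PollingWorker(BlockingQueue<String> inbox) {
      this.inbox = inbox;
    }

    @Override
    public void run() {
      while (run) {
        try {
          // Poll with a timeout so the run flag is re-checked periodically.
          final String job = inbox.poll(100, TimeUnit.MILLISECONDS);
          if (job != null) {
            // Process the job (omitted).
          }
        } catch (final InterruptedException ex) {
          Thread.currentThread().interrupt();
          return;
        }
      }
    }
  }

  public static void main(String[] args) {
    final BlockingQueue<String> queue = new ArrayBlockingQueue<>(10);
    final PollingWorker worker = new PollingWorker(queue);
    final Thread thread = new Thread(worker);
    thread.start();

    // Signal the worker to stop, then join, as in closeQueue().
    worker.run = false;
    try {
      thread.join();
    } catch (final InterruptedException ex) {
      Thread.currentThread().interrupt();
      throw new ConcurrentRuntimeException("Unexpected interruption", ex);
    }
  }
}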
Use of org.apache.commons.lang3.concurrent.ConcurrentRuntimeException in project GDSC-SMLM by aherbert:
the class BenchmarkSpotFit, method runFitting.
private BenchmarkSpotFitResult runFitting() {
  // Extract all the results in memory into a list per frame. This can be cached
  boolean refresh = false;
  Pair<Integer, TIntObjectHashMap<List<Coordinate>>> coords = coordinateCache.get();
  if (coords.getKey() != simulationParameters.id) {
    // Do not get integer coordinates
    // The Coordinate objects will be PeakResultPoint objects that store the original PeakResult
    // from the MemoryPeakResults
    coords = Pair.of(simulationParameters.id, ResultsMatchCalculator.getCoordinates(results, false));
    coordinateCache.set(coords);
    refresh = true;
  }
  final TIntObjectHashMap<List<Coordinate>> actualCoordinates = coords.getValue();

  // Extract all the candidates into a list per frame. This can be cached if the settings have not
  // changed
  final int width = (config.isIncludeNeighbours()) ? config.getFittingWidth() : 0;
  CandidateData candidateData = candidateDataCache.get();
  if (refresh || candidateData == null || candidateData.differentSettings(filterResult.id, settings, width)) {
    candidateData = subsetFilterResults(filterResult.filterResults, width);
    candidateDataCache.set(candidateData);
  }

  final StopWatch stopWatch = StopWatch.createStarted();
  final ImageStack stack = imp.getImageStack();

  clearFitResults();

  // Save results to memory
  final MemoryPeakResults peakResults = new MemoryPeakResults();
  peakResults.copySettings(this.results);
  peakResults.setName(TITLE);
  config.configureOutputUnits();
  final FitConfiguration fitConfig = config.getFitConfiguration();
  peakResults.setCalibration(fitConfig.getCalibration());
  MemoryPeakResults.addResults(peakResults);

  // Create a pool of workers
  final int nThreads = Prefs.getThreads();
  final BlockingQueue<Integer> jobs = new ArrayBlockingQueue<>(nThreads * 2);
  final List<Worker> workers = new LinkedList<>();
  final List<Thread> threads = new LinkedList<>();
  final Ticker ticker = ImageJUtils.createTicker(stack.getSize(), nThreads, "Fitting frames ...");
  final PeakResults syncResults = SynchronizedPeakResults.create(peakResults, nThreads);
  for (int i = 0; i < nThreads; i++) {
    final Worker worker = new Worker(jobs, stack, actualCoordinates, candidateData.filterCandidates, syncResults, ticker);
    final Thread t = new Thread(worker);
    workers.add(worker);
    threads.add(t);
    t.start();
  }

  // Fit the frames
  final long startTime = System.nanoTime();
  for (int i = 1; i <= stack.getSize(); i++) {
    put(jobs, i);
  }
  // Finish all the worker threads by passing in a shutdown signal (-1)
  for (int i = 0; i < threads.size(); i++) {
    put(jobs, -1);
  }
  // Wait for all to finish
  for (int i = 0; i < threads.size(); i++) {
    try {
      threads.get(i).join();
    } catch (final InterruptedException ex) {
      Thread.currentThread().interrupt();
      throw new ConcurrentRuntimeException(ex);
    }
  }
  final long runTime = System.nanoTime() - startTime;
  threads.clear();
  ImageJUtils.finished();

  if (ImageJUtils.isInterrupted()) {
    return null;
  }

  stopWatch.stop();
  final String timeString = stopWatch.toString();
  IJ.log("Spot fit time : " + timeString);

  IJ.showStatus("Collecting results ...");

  if (fitConfig.isFitCameraCounts()) {
    // Convert to photons for consistency
    results.convertToPreferredUnits();
  }

  final TIntObjectHashMap<FilterCandidates> fitResults = new TIntObjectHashMap<>();
  for (final Worker w : workers) {
    fitResults.putAll(w.results);
  }

  // Assign a unique ID to each result
  int count = 0;
  // Materialise into an array since we use it twice
  final FilterCandidates[] candidates = fitResults.values(new FilterCandidates[fitResults.size()]);
  for (final FilterCandidates result : candidates) {
    for (int i = 0; i < result.fitResult.length; i++) {
      final MultiPathFitResult fitResult = result.fitResult[i];
      count += count(fitResult.getSingleFitResult());
      count += count(fitResult.getMultiFitResult());
      count += count(fitResult.getDoubletFitResult());
      count += count(fitResult.getMultiDoubletFitResult());
    }
  }
  final PreprocessedPeakResult[] preprocessedPeakResults = new PreprocessedPeakResult[count];
  count = 0;
  for (final FilterCandidates result : candidates) {
    for (int i = 0; i < result.fitResult.length; i++) {
      final MultiPathFitResult fitResult = result.fitResult[i];
      count = store(fitResult.getSingleFitResult(), count, preprocessedPeakResults);
      count = store(fitResult.getMultiFitResult(), count, preprocessedPeakResults);
      count = store(fitResult.getDoubletFitResult(), count, preprocessedPeakResults);
      count = store(fitResult.getMultiDoubletFitResult(), count, preprocessedPeakResults);
    }
  }

  final BenchmarkSpotFitResult newSpotFitResults = new BenchmarkSpotFitResult(simulationParameters.id, fitResults);
  newSpotFitResults.distanceInPixels = distanceInPixels;
  newSpotFitResults.lowerDistanceInPixels = lowerDistanceInPixels;
  newSpotFitResults.stopWatch = stopWatch;

  summariseResults(newSpotFitResults, runTime, preprocessedPeakResults, count, candidateData, actualCoordinates);

  IJ.showStatus("");

  spotFitResults.set(newSpotFitResults);
  return newSpotFitResults;
}
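runFitting (and runFit below) shuts its workers down by queuing one -1 sentinel per thread and then joining; the put(jobs, i) helper itself is not shown in these snippets. The following is a hedged, self-contained sketch of that poison-pill pattern, including a plausible shape for the helper that wraps the checked InterruptedException from BlockingQueue.put in the same unchecked type (the class name, helper and worker body are assumptions for illustration, not the GDSC-SMLM implementation):

import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;

import org.apache.commons.lang3.concurrent.ConcurrentRuntimeException;

public class PoisonPillSketch {
  // Hypothetical helper: block until the job can be queued, converting interruption
  // into an unchecked exception so callers do not have to declare InterruptedException.
  static void put(BlockingQueue<Integer> jobs, int job) {
    try {
      jobs.put(job);
    } catch (final InterruptedException ex) {
      Thread.currentThread().interrupt();
      throw new ConcurrentRuntimeException("Unexpected interruption", ex);
    }
  }

  public static void main(String[] args) throws InterruptedException {
    final BlockingQueue<Integer> jobs = new ArrayBlockingQueue<>(4);
    final int nThreads = 2;
    final Thread[] threads = new Thread[nThreads];
    for (int i = 0; i < nThreads; i++) {
      threads[i] = new Thread(() -> {
        for (;;) {
          final int job;
          try {
            job = jobs.take();
          } catch (final InterruptedException ex) {
            Thread.currentThread().interrupt();
            return;
          }
          if (job < 0) {
            // Shutdown signal: stop this worker.
            return;
          }
          // Process the job (omitted); here just report it.
          System.out.println("Processed frame " + job);
        }
      });
      threads[i].start();
    }

    // Queue the work, then one shutdown signal per worker, then join.
    for (int i = 1; i <= 10; i++) {
      put(jobs, i);
    }
    for (int i = 0; i < nThreads; i++) {
      put(jobs, -1);
    }
    for (final Thread t : threads) {
      t.join();
    }
  }
}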
Use of org.apache.commons.lang3.concurrent.ConcurrentRuntimeException in project GDSC-SMLM by aherbert:
the class BenchmarkFilterAnalysis, method scoreFilters.
@Nullable
private FilterScoreResult[] scoreFilters(FilterSet filterSet, boolean createTextResult) {
  if (filterSet.size() == 0) {
    return null;
  }

  initialiseScoring(filterSet);

  FilterScoreResult[] scoreResults = new FilterScoreResult[filterSet.size()];

  if (scoreResults.length == 1) {
    // No need to multi-thread this
    scoreResults[0] = scoreFilter((DirectFilter) filterSet.getFilters().get(0), defaultMinimalFilter, createTextResult, coordinateStore);
  } else {
    // Multi-thread the scoring of all the results
    final int nThreads = getThreads(scoreResults.length);
    final BlockingQueue<ScoreJob> jobs = new ArrayBlockingQueue<>(nThreads * 2);
    final List<Thread> threads = new LinkedList<>();
    final Ticker ticker = ImageJUtils.createTicker(scoreResults.length, nThreads, "Scoring Filters");
    for (int i = 0; i < nThreads; i++) {
      final ScoreWorker worker = new ScoreWorker(jobs, scoreResults, createTextResult, (coordinateStore == null) ? null : coordinateStore.newInstance(), ticker);
      final Thread t = new Thread(worker);
      threads.add(t);
      t.start();
    }

    int index = 0;
    for (final Filter filter : filterSet.getFilters()) {
      if (IJ.escapePressed()) {
        break;
      }
      put(jobs, new ScoreJob((DirectFilter) filter, index++));
    }

    // Finish all the worker threads by passing in a null job
    for (int i = 0; i < threads.size(); i++) {
      put(jobs, new ScoreJob(null, -1));
    }

    // Wait for all to finish
    for (int i = 0; i < threads.size(); i++) {
      try {
        threads.get(i).join();
      } catch (final InterruptedException ex) {
        Logger.getLogger(BenchmarkFilterAnalysis.class.getName()).log(Level.WARNING, "Interrupted!", ex);
        Thread.currentThread().interrupt();
        throw new ConcurrentRuntimeException("Unexpected interruption", ex);
      }
    }
    threads.clear();
    ImageJUtils.finished();

    // In case the threads were interrupted
    if (ImageJUtils.isInterrupted()) {
      scoreResults = null;
    }
  }

  finishScoring();

  return scoreResults;
}
Use of org.apache.commons.lang3.concurrent.ConcurrentRuntimeException in project GDSC-SMLM by aherbert:
the class BenchmarkFit, method runFit.
private void runFit() {
  // Initialise the answer.
  answer[Gaussian2DFunction.BACKGROUND] = benchmarkParameters.getBackground();
  answer[Gaussian2DFunction.SIGNAL] = benchmarkParameters.getSignal();
  answer[Gaussian2DFunction.X_POSITION] = benchmarkParameters.x;
  answer[Gaussian2DFunction.Y_POSITION] = benchmarkParameters.y;
  answer[Gaussian2DFunction.Z_POSITION] = benchmarkParameters.z;
  answer[Gaussian2DFunction.X_SD] = benchmarkParameters.sd / benchmarkParameters.pixelPitch;
  answer[Gaussian2DFunction.Y_SD] = benchmarkParameters.sd / benchmarkParameters.pixelPitch;

  // Set up the fit region. Always round down since 0.5 is the centre of the pixel.
  final int x = (int) benchmarkParameters.x;
  final int y = (int) benchmarkParameters.y;
  region = new Rectangle(x - regionSize, y - regionSize, 2 * regionSize + 1, 2 * regionSize + 1);
  if (!new Rectangle(0, 0, imp.getWidth(), imp.getHeight()).contains(region)) {
    // Check if it is incorrect by only 1 pixel
    if (region.width <= imp.getWidth() + 1 && region.height <= imp.getHeight() + 1) {
      ImageJUtils.log("Adjusting region %s to fit within image bounds (%dx%d)", region.toString(), imp.getWidth(), imp.getHeight());
      region = new Rectangle(0, 0, imp.getWidth(), imp.getHeight());
    } else {
      IJ.error(TITLE, "Fit region does not fit within the image");
      return;
    }
  }

  // Adjust the centre & account for 0.5 pixel offset during fitting
  answer[Gaussian2DFunction.X_POSITION] -= (region.x + 0.5);
  answer[Gaussian2DFunction.Y_POSITION] -= (region.y + 0.5);

  // Configure for fitting
  fitConfig.setBackgroundFitting(backgroundFitting);
  fitConfig.setNotSignalFitting(!signalFitting);
  fitConfig.setComputeDeviations(false);

  // Create the camera model
  CameraModel cameraModel = fitConfig.getCameraModel();
  // Crop for speed. Reset origin first so the region is within the model
  cameraModel.setOrigin(0, 0);
  cameraModel = cameraModel.crop(region, false);

  final ImageStack stack = imp.getImageStack();
  final int totalFrames = benchmarkParameters.frames;

  // Create a pool of workers
  final int nThreads = Prefs.getThreads();
  final BlockingQueue<Integer> jobs = new ArrayBlockingQueue<>(nThreads * 2);
  final List<Worker> workers = new LinkedList<>();
  final List<Thread> threads = new LinkedList<>();
  final Ticker ticker = ImageJUtils.createTicker(totalFrames, nThreads, "Fitting frames ...");
  for (int i = 0; i < nThreads; i++) {
    final Worker worker = new Worker(jobs, stack, region, fitConfig, cameraModel, ticker);
    final Thread t = new Thread(worker);
    workers.add(worker);
    threads.add(t);
    t.start();
  }

  // Store all the fitting results
  results = new double[totalFrames * startPoints.length][];
  resultsTime = new long[results.length];

  // Fit the frames
  for (int i = 0; i < totalFrames; i++) {
    // Only fit if there were simulated photons
    if (benchmarkParameters.framePhotons[i] > 0) {
      put(jobs, i);
    }
  }
  // Finish all the worker threads by passing in a shutdown signal (-1)
  for (int i = 0; i < threads.size(); i++) {
    put(jobs, -1);
  }
  // Wait for all to finish
  for (int i = 0; i < threads.size(); i++) {
    try {
      threads.get(i).join();
    } catch (final InterruptedException ex) {
      Thread.currentThread().interrupt();
      throw new ConcurrentRuntimeException(ex);
    }
  }
  threads.clear();

  if (hasOffsetXy()) {
    ImageJUtils.log(TITLE + ": CoM within start offset = %d / %d (%s%%)", comValid.intValue(), totalFrames, MathUtils.rounded((100.0 * comValid.intValue()) / totalFrames));
  }

  ImageJUtils.finished("Collecting results ...");

  // Collect the results
  Statistics[] stats = null;
  for (int i = 0; i < workers.size(); i++) {
    final Statistics[] next = workers.get(i).stats;
    if (stats == null) {
      stats = next;
      continue;
    }
    for (int j = 0; j < next.length; j++) {
      stats[j].add(next[j]);
    }
  }
  workers.clear();
  Objects.requireNonNull(stats, "No statistics were computed");

  // Show a table of the results
  summariseResults(stats, cameraModel);

  // Optionally show histograms
  if (showHistograms) {
    IJ.showStatus("Calculating histograms ...");
    final WindowOrganiser windowOrganiser = new WindowOrganiser();
    final double[] convert = getConversionFactors();
    final HistogramPlotBuilder builder = new HistogramPlotBuilder(TITLE).setNumberOfBins(histogramBins);
    for (int i = 0; i < NAMES.length; i++) {
      if (displayHistograms[i] && convert[i] != 0) {
        // We will have to convert the values...
        final double[] tmp = ((StoredDataStatistics) stats[i]).getValues();
        for (int j = 0; j < tmp.length; j++) {
          tmp[j] *= convert[i];
        }
        final StoredDataStatistics tmpStats = StoredDataStatistics.create(tmp);
        builder.setData(tmpStats).setName(NAMES[i]).setPlotLabel(String.format("%s +/- %s", MathUtils.rounded(tmpStats.getMean()), MathUtils.rounded(tmpStats.getStandardDeviation()))).show(windowOrganiser);
      }
    }
    windowOrganiser.tile();
  }

  if (saveRawData) {
    final String dir = ImageJUtils.getDirectory("Data_directory", rawDataDirectory);
    if (dir != null) {
      saveData(stats, dir);
    }
  }

  IJ.showStatus("");
}