Use of gnu.trove.map.hash.TIntObjectHashMap in project ProPPR by TeamCohen:
the method setOutlinks of the class LightweightStateGraph.
/**
 * Records the outgoing edges of state {@code u}, replacing any outlinks that
 * were previously stored for it (the edge count is adjusted accordingly).
 * Children are appended to {@code near} and each edge's feature weights are
 * interned into {@code edgeFeatureDict} keyed by child-node id.
 */
public void setOutlinks(State u, List<Outlink> outlinks) {
	// wwc: why are we saving these outlinks as a trove thing? space?
	int uid = this.nodeTab.getId(u);
	if (near.containsKey(uid)) {
		// Replacing an existing adjacency list: undo its edge count first.
		log.warn("Overwriting previous outlinks for state " + u);
		edgeCount -= near.get(uid).size();
	}
	TIntArrayList neighborIds = new TIntArrayList(outlinks.size());
	near.put(uid, neighborIds);
	TIntObjectHashMap<TIntDoubleHashMap> featuresByChild = new TIntObjectHashMap<TIntDoubleHashMap>();
	edgeFeatureDict.put(uid, featuresByChild);
	for (Outlink link : outlinks) {
		int childId = this.nodeTab.getId(link.child);
		neighborIds.add(childId);
		edgeCount++;
		// Convert the Feature->weight map into an interned-id trove map.
		TIntDoubleHashMap edgeFeatures = new TIntDoubleHashMap(link.fd.size());
		for (Map.Entry<Feature, Double> entry : link.fd.entrySet()) {
			edgeFeatures.put(this.featureTab.getId(entry.getKey()), entry.getValue());
		}
		featuresByChild.put(childId, edgeFeatures);
	}
}
Use of gnu.trove.map.hash.TIntObjectHashMap in project GDSC-SMLM by aherbert:
the method getCoordinates of the class BenchmarkFilterAnalysis.
/**
 * Groups the peak results by frame, wrapping each result as an IdPeakResult
 * carrying a per-frame id and a globally unique id.
 *
 * @param list the results (sorted in place as a side effect)
 * @return a map of frame number to the results in that frame
 */
private TIntObjectHashMap<IdPeakResult[]> getCoordinates(List<PeakResult> list) {
	final TIntObjectHashMap<IdPeakResult[]> coords = new TIntObjectHashMap<IdPeakResult[]>();
	if (!list.isEmpty()) {
		// Sorting clusters results by frame so each time point can be flushed
		// as a single array, avoiding many hash-map getEntry() calls while building.
		Collections.sort(list);
		int currentFrame = -1;
		int idWithinFrame = 0;
		int uniqueId = 0;
		ArrayList<PeakResult> frameBuffer = new ArrayList<PeakResult>();
		for (PeakResult result : list) {
			if (currentFrame != result.getFrame() && !frameBuffer.isEmpty()) {
				// Frame boundary: flush the finished frame and restart the per-frame id.
				coords.put(currentFrame, frameBuffer.toArray(new IdPeakResult[frameBuffer.size()]));
				frameBuffer.clear();
				idWithinFrame = 0;
			}
			currentFrame = result.getFrame();
			frameBuffer.add(new IdPeakResult(idWithinFrame++, uniqueId++, result));
		}
		// Flush the trailing frame.
		if (!frameBuffer.isEmpty()) {
			coords.put(currentFrame, frameBuffer.toArray(new IdPeakResult[frameBuffer.size()]));
		}
	}
	return coords;
}
Use of gnu.trove.map.hash.TIntObjectHashMap in project GDSC-SMLM by aherbert:
the method run of the class BenchmarkSpotFilter.
/**
 * Runs the spot-filter benchmark: (1) lazily computes and caches the PSF
 * overlap of the simulated ground-truth coordinates (keyed on the simulation
 * id), then (2) filters every frame of the image stack with a pool of worker
 * threads and summarises the per-frame results.
 *
 * @param config the fit engine configuration used to create the spot filter
 * @param relativeDistances whether the filter uses relative distances
 * @param batchSummary whether to produce the batch summary table
 * @return the benchmark result, or null if the run was interrupted
 */
private BenchmarkFilterResult run(FitEngineConfiguration config, boolean relativeDistances, boolean batchSummary) {
	if (Utils.isInterrupted())
		return null;
	MaximaSpotFilter spotFilter = config.createSpotFilter(relativeDistances);
	// Extract all the results in memory into a list per frame. This can be cached.
	// (relativeDistances used to be part of the cache key:
	//  || lastRelativeDistances != relativeDistances)
	if (lastId != simulationParameters.id) {
		// Always use float coordinates.
		// The Worker adds a pixel offset for the spot coordinates.
		TIntObjectHashMap<ArrayList<Coordinate>> coordinates = ResultsMatchCalculator.getCoordinates(results.getResults(), false);
		actualCoordinates = new TIntObjectHashMap<PSFSpot[]>();
		lastId = simulationParameters.id;
		//lastRelativeDistances = relativeDistances;
		// Store these so we can reset them after hijacking the progress counters.
		final int total = totalProgress;
		final String prefix = progressPrefix;
		// Spot PSFs may overlap so we must determine the amount of signal overlap and amplitude effect
		// for each spot...
		IJ.showStatus("Computing PSF overlap ...");
		final int nThreads = Prefs.getThreads();
		final BlockingQueue<Integer> jobs = new ArrayBlockingQueue<Integer>(nThreads * 2);
		List<OverlapWorker> workers = new LinkedList<OverlapWorker>();
		List<Thread> threads = new LinkedList<Thread>();
		for (int i = 0; i < nThreads; i++) {
			OverlapWorker worker = new OverlapWorker(jobs, coordinates);
			Thread t = new Thread(worker);
			workers.add(worker);
			threads.add(t);
			t.start();
		}
		// Process the frames
		totalProgress = coordinates.size();
		stepProgress = Utils.getProgressInterval(totalProgress);
		progress = 0;
		coordinates.forEachKey(new TIntProcedure() {
			public boolean execute(int value) {
				put(jobs, value);
				return true;
			}
		});
		// Finish all the worker threads by passing in a poison-pill job (-1)
		for (int i = 0; i < threads.size(); i++) {
			put(jobs, -1);
		}
		// Wait for all to finish and merge each worker's per-frame PSF spots
		for (int i = 0; i < threads.size(); i++) {
			try {
				threads.get(i).join();
			} catch (InterruptedException e) {
				// Restore the interrupt status instead of silently swallowing it
				Thread.currentThread().interrupt();
				e.printStackTrace();
			}
			actualCoordinates.putAll(workers.get(i).coordinates);
		}
		threads.clear();
		IJ.showProgress(-1);
		IJ.showStatus("");
		setupProgress(total, prefix);
	}
	if (!batchMode)
		IJ.showStatus("Computing results ...");
	final ImageStack stack = imp.getImageStack();
	float background = 0;
	if (spotFilter.isAbsoluteIntensity()) {
		// To allow the signal factor to be computed we need to lower the image by the background so
		// that the intensities correspond to the results amplitude.
		// Just assume the background is uniform.
		double sum = 0;
		for (PeakResult r : results) sum += r.getBackground();
		// Guard against an empty result set producing NaN (0/0)
		if (results.size() > 0)
			background = (float) (sum / results.size());
	}
	// Create a pool of workers
	final int nThreads = Prefs.getThreads();
	BlockingQueue<Integer> jobs = new ArrayBlockingQueue<Integer>(nThreads * 2);
	List<Worker> workers = new LinkedList<Worker>();
	List<Thread> threads = new LinkedList<Thread>();
	for (int i = 0; i < nThreads; i++) {
		Worker worker = new Worker(jobs, stack, spotFilter, background);
		Thread t = new Thread(worker);
		workers.add(worker);
		threads.add(t);
		t.start();
	}
	// Fit the frames (stack slices are 1-based)
	for (int i = 1; i <= stack.getSize(); i++) {
		put(jobs, i);
	}
	// Finish all the worker threads by passing in a poison-pill job (-1)
	for (int i = 0; i < threads.size(); i++) {
		put(jobs, -1);
	}
	// Wait for all to finish
	for (int i = 0; i < threads.size(); i++) {
		try {
			threads.get(i).join();
		} catch (InterruptedException e) {
			// Restore the interrupt status instead of silently swallowing it
			Thread.currentThread().interrupt();
			e.printStackTrace();
		}
	}
	threads.clear();
	if (Utils.isInterrupted())
		return null;
	if (!batchMode) {
		IJ.showProgress(-1);
		IJ.showStatus("Collecting results ...");
	}
	TIntObjectHashMap<FilterResult> filterResults = new TIntObjectHashMap<FilterResult>();
	time = 0;
	for (Worker w : workers) {
		time += w.time;
		filterResults.putAll(w.results);
	}
	// Show a table of the results
	BenchmarkFilterResult filterResult = summariseResults(filterResults, config, spotFilter, relativeDistances, batchSummary);
	if (!batchMode)
		IJ.showStatus("");
	return filterResult;
}
Use of gnu.trove.map.hash.TIntObjectHashMap in project mixcr by milaboratory:
the method calculateRawData of the class FullSeqAssembler.
/**
 * Streams the alignments twice to build the raw per-position variant matrix:
 * pass 1 counts coverage (and aggregates variant statistics) per point; the
 * points are then sorted by descending coverage; pass 2 packs, for every
 * alignment, the variant id and quality observed at each retained point.
 *
 * @param alignments supplier of a fresh alignment port (consumed twice)
 * @return the packed raw variant data for the sorted points
 * @throws IllegalStateException if the variant dictionaries were already seeded
 */
public RawVariantsData calculateRawData(Supplier<OutputPort<VDJCAlignments>> alignments) {
	// This method seeds the variant dictionaries itself; they must start empty.
	if (!sequenceToVariantId.isEmpty())
		throw new IllegalStateException();
	// Pre-register each single-letter sequence so its variant id equals the letter code.
	for (byte letter = 0; letter < NucleotideSequence.ALPHABET.basicSize(); letter++) {
		NucleotideSequence seq = new NucleotideSequence(new byte[] { letter });
		sequenceToVariantId.put(seq, letter);
		variantIdToSequence.put(letter, seq);
	}
	// The empty sequence takes the next id after the basic alphabet.
	sequenceToVariantId.put(NucleotideSequence.EMPTY, NucleotideSequence.ALPHABET.basicSize());
	variantIdToSequence.put(NucleotideSequence.ALPHABET.basicSize(), NucleotideSequence.EMPTY);
	// coverage: point -> number of reads covering that point
	TIntIntHashMap coverage = new TIntIntHashMap();
	// variants: point -> (variant id -> aggregated count / summed quality)
	TIntObjectHashMap<TIntObjectHashMap<VariantAggregator>> variants = new TIntObjectHashMap<>();
	int nAlignments = 0;
	for (VDJCAlignments al : CUtils.it(alignments.get())) {
		++nAlignments;
		for (PointSequence point : toPointSequences(al)) {
			int seqIndex = getVariantIndex(point.sequence.getSequence());
			coverage.adjustOrPutValue(point.point, 1, 1);
			TIntObjectHashMap<VariantAggregator> map = variants.get(point.point);
			if (map == null)
				variants.put(point.point, map = new TIntObjectHashMap<>());
			VariantAggregator var = map.get(seqIndex);
			if (var == null)
				// FIX: the inner map is keyed by variant id (seqIndex). The previous
				// code stored under point.point, so the aggregator created here was
				// never found again by map.get(seqIndex) and counts were fragmented.
				map.put(seqIndex, var = new VariantAggregator());
			var.count += 1;
			var.sumQuality += 0x7F & point.quality;
		}
	}
	assert nAlignments > 0;
	// Sort points by descending coverage: pack (coverage << 32 | point),
	// negate so that Arrays.sort yields the highest coverage first.
	long[] forSort = new long[coverage.size()];
	TIntIntIterator iterator = coverage.iterator();
	int i = 0;
	while (iterator.hasNext()) {
		iterator.advance();
		forSort[i++] = -((((long) iterator.value()) << 32) | iterator.key());
	}
	Arrays.sort(forSort);
	int[] pointsArray = Arrays.stream(forSort).mapToInt(l -> (int) (-l)).toArray();
	// revIndex: point -> its row in the sorted order
	TIntIntHashMap revIndex = new TIntIntHashMap();
	for (int j = 0; j < pointsArray.length; j++) revIndex.put(pointsArray[j], j);
	int[] coverageArray = Arrays.stream(forSort).mapToInt(l -> (int) ((-l) >> 32)).toArray();
	// packedData[row][alignment] = (variantId << 8) | quality; -1 marks "not covered".
	int[][] packedData = new int[pointsArray.length][nAlignments];
	for (int[] aPackedData : packedData) Arrays.fill(aPackedData, -1);
	i = 0;
	for (VDJCAlignments al : CUtils.it(alignments.get())) {
		for (PointSequence point : toPointSequences(al)) {
			int pointIndex = revIndex.get(point.point);
			packedData[pointIndex][i] = (sequenceToVariantId.get(point.sequence.getSequence()) << 8) | (0xFF & point.quality);
		}
		i++;
	}
	return new RawVariantsData(nAlignments, pointsArray, coverageArray) {

		@Override
		OutputPort<int[]> createPort() {
			return CUtils.asOutputPort(Arrays.asList(packedData));
		}
	};
}
Use of gnu.trove.map.hash.TIntObjectHashMap in project Random-Things by lumien231:
the method getModelLocation of the class PortKeyMesh.
/**
 * Resolves the model for a port key item. If the stack carries a "camo"
 * NBT tag, the model registered for the camouflage item (and its metadata)
 * is returned; otherwise the default port key model is used.
 */
@Override
public ModelResourceLocation getModelLocation(ItemStack stack) {
	NBTTagCompound camo = stack.getSubCompound("camo");
	if (camo != null) {
		ItemStack camoStack = new ItemStack(camo.getCompoundTag("stack"));
		Map<IRegistryDelegate<Item>, TIntObjectHashMap<ModelResourceLocation>> modelMap = ReflectionUtil.getModelMap();
		if (modelMap != null) {
			// Look up the camouflage item's registered models by metadata.
			Item camoItem = camoStack.getItem();
			TIntObjectHashMap<ModelResourceLocation> metaMap = modelMap.get(camoItem.delegate);
			if (metaMap != null) {
				int meta = camoStack.getItemDamage();
				if (metaMap.contains(meta)) {
					return metaMap.get(meta);
				}
			}
		}
	}
	// No camouflage (or no registered model): fall back to the plain port key model.
	return new ModelResourceLocation("randomthings:portkey");
}
Aggregations