Use of gnu.trove.map.hash.TIntObjectHashMap in project FoamFix by asiekierka — class FoamFixModelRegistryDuplicateWipe, method onTextureStitchPost.
@SubscribeEvent
public void onTextureStitchPost(TextureStitchEvent.Post event) {
    // After the post-stitch phase the duplicated model registry is no longer
    // needed: replace every baked model with a shared dummy and clear the
    // derived caches so the duplicate model data can be garbage-collected.
    ItemModelMesher imm = Minecraft.getMinecraft().getRenderItem().getItemModelMesher();
    BlockModelShapes bms = Minecraft.getMinecraft().getBlockRendererDispatcher().getBlockModelShapes();
    ModelManager mgr = bms.getModelManager();

    // ModelManager.modelRegistry (SRG name: field_174958_a)
    Field f = ReflectionHelper.findField(ModelManager.class, "modelRegistry", "field_174958_a");
    try {
        @SuppressWarnings("unchecked")
        IRegistry<ModelResourceLocation, IBakedModel> registry =
                (IRegistry<ModelResourceLocation, IBakedModel>) f.get(mgr);
        FoamFix.logger.info("Clearing unnecessary model registry of size " + registry.getKeys().size() + ".");
        // Overwrite instead of removing: other code may still look the keys up,
        // so every entry must keep resolving to *some* model.
        for (ModelResourceLocation l : registry.getKeys()) {
            registry.putObject(l, ProxyClient.DUMMY_MODEL);
        }
    } catch (Exception e) {
        // Best-effort cleanup: log through the mod logger (not printStackTrace)
        // and continue so the remaining caches are still cleared.
        FoamFix.logger.error("Failed to clear the model registry", e);
    }

    // BlockModelShapes.bakedModelStore (SRG name: field_178129_a)
    f = ReflectionHelper.findField(BlockModelShapes.class, "bakedModelStore", "field_178129_a");
    try {
        @SuppressWarnings("unchecked")
        Map<IBlockState, IBakedModel> modelStore = (Map<IBlockState, IBakedModel>) f.get(bms);
        FoamFix.logger.info("Clearing unnecessary model store of size " + modelStore.size() + ".");
        modelStore.clear();
    } catch (Exception e) {
        FoamFix.logger.error("Failed to clear the baked model store", e);
    }

    // Forge's item mesher caches baked models per item; safe to drop as well.
    if (imm instanceof ItemModelMesherForge) {
        f = ReflectionHelper.findField(ItemModelMesherForge.class, "models");
        try {
            @SuppressWarnings("unchecked")
            Map<IRegistryDelegate<Item>, TIntObjectHashMap<IBakedModel>> modelStore =
                    (Map<IRegistryDelegate<Item>, TIntObjectHashMap<IBakedModel>>) f.get(imm);
            FoamFix.logger.info("Clearing unnecessary item shapes cache of size " + modelStore.size() + ".");
            modelStore.clear();
        } catch (Exception e) {
            FoamFix.logger.error("Failed to clear the item shapes cache", e);
        }
    }
}
Use of gnu.trove.map.hash.TIntObjectHashMap in project GDSC-SMLM by aherbert — class ResultsMatchCalculator, method getTimepoints.
/**
 * Merge the time points from each map into a single sorted array of unique time points.
 *
 * @param actualCoordinates the actual coordinates
 * @param predictedCoordinates the predicted coordinates
 * @return a sorted array of unique time points
 */
private static int[] getTimepoints(TIntObjectHashMap<List<Coordinate>> actualCoordinates, TIntObjectHashMap<List<Coordinate>> predictedCoordinates) {
    // Stream the keys of both maps straight into a set; this avoids
    // materialising an intermediate key array for either map.
    final TIntHashSet union = new TIntHashSet(Math.max(actualCoordinates.size(), predictedCoordinates.size()));
    final TIntProcedure collectKey = key -> {
        union.add(key);
        return true;
    };
    actualCoordinates.forEachKey(collectKey);
    predictedCoordinates.forEachKey(collectKey);
    final int[] timepoints = union.toArray();
    Arrays.sort(timepoints);
    return timepoints;
}
Use of gnu.trove.map.hash.TIntObjectHashMap in project GDSC-SMLM by aherbert — class ResultsMatchCalculator, method getCoordinates.
/**
 * Build a map between the peak id (time point) and a list of coordinates.
 *
 * @param results the results
 * @param coordinateMethod the coordinate method
 * @param integerCoordinates True if the values should be rounded down to integers
 * @return the coordinates
 */
public static TIntObjectHashMap<List<Coordinate>> getCoordinates(MemoryPeakResults results, CoordinateMethod coordinateMethod, final boolean integerCoordinates) {
    final TIntObjectHashMap<List<Coordinate>> frameToCoords = new TIntObjectHashMap<>();
    if (results.size() > 0) {
        // Avoid repeated hash-map lookups while building: sort the results
        // once, fill one contiguous list per frame, then transfer the lists
        // into the hash map at the end.
        results.sort();
        final int firstFrame = results.getFirstFrame();
        final int lastFrame = results.getLastFrame();
        final int frameCount = lastFrame - firstFrame + 1;
        final ArrayList<ArrayList<Coordinate>> perFrame = new ArrayList<>(frameCount);
        for (int i = 0; i < frameCount; i++) {
            perFrame.add(new ArrayList<Coordinate>());
        }
        // Distribute each result to every frame it covers.
        results.forEach((PeakResultProcedure) result -> {
            // Optionally truncate the coordinates to integer pixel positions.
            final float x = integerCoordinates ? (int) result.getXPosition() : result.getXPosition();
            final float y = integerCoordinates ? (int) result.getYPosition() : result.getYPosition();
            final float z = integerCoordinates ? (int) result.getZPosition() : result.getZPosition();
            final int start = getStartFrame(result, coordinateMethod);
            final int end = getEndFrame(result, coordinateMethod);
            for (int frame = start; frame <= end; frame++) {
                perFrame.get(frame - firstFrame).add(new PeakResultPoint(frame, x, y, z, result));
            }
        });
        // Transfer the per-frame lists into the map keyed by frame number.
        for (int i = 0; i < frameCount; i++) {
            frameToCoords.put(firstFrame + i, perFrame.get(i));
        }
    }
    return frameToCoords;
}
Use of gnu.trove.map.hash.TIntObjectHashMap in project GDSC-SMLM by aherbert — class ClassificationMatchCalculator, method runCompareClassifications.
/**
 * Compare the classifications (id and/or category) assigned to matched localisations in the two
 * results sets and report the (adjusted) Rand index in a results table.
 *
 * @param results1 the first results set
 * @param results2 the second results set
 */
private void runCompareClassifications(MemoryPeakResults results1, MemoryPeakResults results2) {
    final List<PointPair> allMatches = new LinkedList<>();

    // Optionally exclude results which do not have an id and/or category.
    Predicate<PeakResult> test = settings.useId == ClassAnalysis.IGNORE_ZERO ? r -> r.getId() != 0 : null;
    if (settings.useCategory == ClassAnalysis.IGNORE_ZERO) {
        final Predicate<PeakResult> test2 = r -> r.getCategory() != 0;
        test = test == null ? test2 : test.and(test2);
    } else if (test == null) {
        test = r -> true;
    }

    // Divide the results into time points.
    final TIntObjectHashMap<List<PeakResultPoint>> coordinates1 = getCoordinates(results1, test);
    final TIntObjectHashMap<List<PeakResultPoint>> coordinates2 = getCoordinates(results2, test);

    // Match the localisations within each time point.
    int n1 = 0;
    int n2 = 0;
    for (final int t : getTimepoints(coordinates1, coordinates2)) {
        final Coordinate[] c1 = getCoordinates(coordinates1, t);
        final Coordinate[] c2 = getCoordinates(coordinates2, t);
        n1 += c1.length;
        n2 += c2.length;
        final List<PointPair> matches = new LinkedList<>();
        MatchCalculator.analyseResults3D(c1, c2, settings.matchDistance, null, null, null, matches);
        allMatches.addAll(matches);
    }

    if (allMatches.isEmpty()) {
        IJ.error(TITLE, "No localisation matches between the two results sets");
        return;
    }

    // Get the unique Ids and Categories in the matches.
    final Mapper ids = getMapper(allMatches, PeakResult::getId, settings.useId);
    final Mapper cats = getMapper(allMatches, PeakResult::getCategory, settings.useCategory);

    // Map id/category to an index = stride * cat + id.
    final int stride = ids.size();
    // Any integer is allowed as an index (Resequencer renumbers later), so up to
    // 2^32 combinations fit in the int range; use long arithmetic for the check.
    if ((long) stride * cats.size() > 1L << 32) {
        IJ.error(TITLE, "Too many combinations of id and category to assign unique labels");
        return;
    }

    // Extract one class index per matched pair from each results set.
    final int[] set1 = new int[allMatches.size()];
    final int[] set2 = new int[allMatches.size()];
    int i = 0;
    for (final PointPair pair : allMatches) {
        set1[i] = toIndex(stride, ids, cats, ((PeakResultPoint) pair.getPoint1()).getPeakResult());
        set2[i] = toIndex(stride, ids, cats, ((PeakResultPoint) pair.getPoint2()).getPeakResult());
        i++;
    }

    // Compact the labels to consecutive integers before computing the Rand index.
    final Resequencer re = new Resequencer();
    re.setCacheMap(true);
    re.renumber(set1);
    re.renumber(set2);

    // Compare the two clusterings.
    final RandIndex r = new RandIndex().compute(set1, set2);

    final TextWindow resultsWindow = ImageJUtils.refresh(resultsWindowRef, () -> new TextWindow(TITLE + " Results", "Results1\tResults2\tID\tCategory\tn1\tc1\tn2\tc2\tMatched\tRand Index\tAdjusted RI", "", 900, 300));
    try (BufferedTextWindow bw = new BufferedTextWindow(resultsWindow)) {
        final StringBuilder sb = new StringBuilder(2048);
        sb.append(results1.getName()).append('\t');
        sb.append(results2.getName()).append('\t');
        sb.append(ANALYSIS_OPTION[settings.useId.ordinal()]).append('\t');
        sb.append(ANALYSIS_OPTION[settings.useCategory.ordinal()]).append('\t');
        sb.append(n1).append('\t');
        // Renumbered labels are 0-based, so max + 1 is the number of classes.
        sb.append(MathUtils.max(set1) + 1).append('\t');
        sb.append(n2).append('\t');
        sb.append(MathUtils.max(set2) + 1).append('\t');
        sb.append(set1.length).append('\t');
        sb.append(MathUtils.rounded(r.getRandIndex())).append('\t');
        sb.append(MathUtils.rounded(r.getAdjustedRandIndex())).append('\t');
        bw.append(sb.toString());
    }
}
Use of gnu.trove.map.hash.TIntObjectHashMap in project GDSC-SMLM by aherbert — class BenchmarkSpotFit, method subsetFilterResults.
/**
 * Extract all the filter candidates in order until the desired number of positives have been
 * reached and the number of negatives matches the configured parameters.
 *
 * @param filterResults the filter results (keyed by frame, each holding the scored spots for that frame)
 * @param fitting the fitting
 * @return The filter candidate data
 */
private CandidateData subsetFilterResults(TIntObjectHashMap<FilterResult> filterResults, int fitting) {
    // Convert fractions from percent
    final double f1 = Math.min(1, settings.fractionPositives / 100.0);
    final double f2 = settings.fractionNegativesAfterAllPositives / 100.0;
    // counter[0] = candidates selected for fitting; counter[1] = all candidates seen.
    // (Arrays are used so the lambda below can mutate the totals.)
    final int[] counter = new int[2];
    final TIntObjectHashMap<FilterCandidates> subset = new TIntObjectHashMap<>();
    // fX[0]/fX[1] = summed fractional TP/FP scores; nX[0]/nX[1] = integer match/non-match counts.
    final double[] fX = new double[2];
    final int[] nX = new int[2];
    filterResults.forEachEntry((frame, result) -> {
        // Determine the number of positives to find. This score may be fractional.
        fX[0] += result.result.getTruePositives();
        fX[1] += result.result.getFalsePositives();

        // Q. Is r.result.getTruePositives() not the same as the total of r.spots[i].match?
        // A. Not if we used fractional scoring.
        int count = 0;
        for (int i = result.spots.length; i-- > 0; ) {
            if (result.spots[i].match) {
                count++;
            }
        }
        nX[0] += count;
        nX[1] += (result.spots.length - count);

        // Make the target use the fractional score
        final double np2 = result.result.getTruePositives() * f1;
        double targetP = np2;

        // Set the target using the cumulative score closest to the fractional target.
        if (f1 < 1) {
            double np = 0;
            double min = result.result.getTruePositives();
            for (final ScoredSpot spot : result.spots) {
                if (spot.match) {
                    np += spot.getScore();
                    final double d = np2 - np;
                    if (d < min) {
                        min = d;
                        targetP = np;
                    } else {
                        // Scores only move further from the target from here on.
                        break;
                    }
                }
            }
        }

        // Count the number of positive & negatives.
        // Walk the candidates in order until the positive target is reached and
        // the configured negative limits are satisfied.
        int pos = 0;
        int neg = 0;
        double np = 0;
        double nn = 0;
        boolean reachedTarget = false;
        int countAfter = 0;
        count = 0;
        for (final ScoredSpot spot : result.spots) {
            count++;
            nn += spot.antiScore();
            if (spot.match) {
                np += spot.getScore();
                pos++;
                if (!reachedTarget) {
                    reachedTarget = np >= targetP;
                }
            } else {
                neg++;
                if (reachedTarget) {
                    // Negatives accepted after all target positives were found.
                    countAfter++;
                }
            }
            // Check if we have reached both the limits
            if (reachedTarget && countAfter >= settings.negativesAfterAllPositives && (double) neg / (neg + pos) >= f2) {
                break;
            }
        }

        counter[0] += count;
        counter[1] += result.spots.length;

        // We can use all the candidates but only fit up to count
        subset.put(frame, new FilterCandidates(pos, neg, np, nn, result.spots, count));
        return true;
    });

    // We now add all the candidates but only fit the first N
    final int target = counter[0];
    final int total = counter[1];
    final int added = total - target;
    if (extraOptions && added > target) {
        ImageJUtils.log("Added %s to %s (total = %d)", TextUtils.pleural(added, "neighbour"), TextUtils.pleural(target, "candidate"), total);
    }
    return new CandidateData(subset, filterResult.id, fX[0], fX[1], nX[0], nX[1], settings, fitting);
}
Aggregations