Use of gnu.trove.map.hash.TIntIntHashMap in project scheduler by btrplace.
The class BanSplitterTest, method simpleTest.
@Test
public void simpleTest() {
    BanSplitter splitter = new BanSplitter();
    List<Instance> instances = new ArrayList<>();
    Model origin = new DefaultModel();
    Node n1 = origin.newNode();
    Node n2 = origin.newNode();
    VM vm1 = origin.newVM();
    VM vm2 = origin.newVM();
    VM vm3 = origin.newVM();
    VM vm4 = origin.newVM();
    /*
     * READY: vm1
     * n1 vm2
     * n2 (vm3) vm4
     */
    origin.getMapping().addOnlineNode(n1);
    origin.getMapping().addReadyVM(vm1);
    origin.getMapping().addRunningVM(vm2, n1);
    origin.getMapping().addOnlineNode(n2);
    origin.getMapping().addSleepingVM(vm3, n2);
    origin.getMapping().addRunningVM(vm4, n2);
    Model m0 = new DefaultModel();
    m0.newNode(n1.id());
    m0.newVM(vm1.id());
    m0.newVM(vm2.id());
    m0.getMapping().addOnlineNode(n1);
    m0.getMapping().addReadyVM(vm1);
    m0.getMapping().addRunningVM(vm2, n1);
    Model m1 = new DefaultModel();
    m1.newNode(n2.id());
    m1.newVM(vm3.id());
    m1.newVM(vm4.id());
    m1.getMapping().addOnlineNode(n2);
    m1.getMapping().addSleepingVM(vm3, n2);
    m1.getMapping().addRunningVM(vm4, n2);
    instances.add(new Instance(m0, new ArrayList<>(), new MinMTTR()));
    instances.add(new Instance(m1, new ArrayList<>(), new MinMTTR()));
    Set<VM> all = new HashSet<>(m0.getMapping().getAllVMs());
    all.addAll(m1.getMapping().getAllVMs());
    TIntIntHashMap vmIndex = Instances.makeVMIndex(instances);
    TIntIntHashMap nodeIndex = Instances.makeNodeIndex(instances);
    // Only VMs & nodes in m0
    Ban single = new Ban(vm1, m0.getMapping().getAllNodes());
    Assert.assertTrue(splitter.split(single, null, instances, vmIndex, nodeIndex));
    Assert.assertTrue(instances.get(0).getSatConstraints().contains(single));
    Assert.assertFalse(instances.get(1).getSatConstraints().contains(single));
    // All the VMs and nodes in m1
    Ban among = new Ban(vm3, m1.getMapping().getAllNodes());
    Assert.assertTrue(splitter.split(among, null, instances, vmIndex, nodeIndex));
    Assert.assertTrue(instances.get(0).getSatConstraints().contains(new Ban(vm1, m0.getMapping().getAllNodes())));
    Assert.assertTrue(instances.get(1).getSatConstraints().contains(new Ban(vm3, m1.getMapping().getAllNodes())));
}
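The vmIndex and nodeIndex maps passed to split() associate each element identifier with the position of the instance that owns it. The sketch below shows one plausible way such an index could be built with a TIntIntHashMap; it is an assumption about the shape of Instances.makeVMIndex, not the actual btrplace implementation.

// Sketch only: maps every VM id to the position of the instance that owns it.
// Assumption about the index layout; btrplace's own makeVMIndex may differ.
static TIntIntHashMap makeVMIndexSketch(List<Instance> instances) {
    TIntIntHashMap index = new TIntIntHashMap();
    for (int i = 0; i < instances.size(); i++) {
        for (VM vm : instances.get(i).getModel().getMapping().getAllVMs()) {
            index.put(vm.id(), i);
        }
    }
    return index;
}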
Use of gnu.trove.map.hash.TIntIntHashMap in project scheduler by btrplace.
The class PreserveSplitterTest, method simpleTest.
@Test
public void simpleTest() {
    PreserveSplitter splitter = new PreserveSplitter();
    List<Instance> instances = new ArrayList<>();
    Model m0 = new DefaultModel();
    VM v = m0.newVM(1);
    m0.getMapping().addReadyVM(v);
    m0.getMapping().addRunningVM(m0.newVM(2), m0.newNode(1));
    Model m1 = new DefaultModel();
    m1.getMapping().addReadyVM(m1.newVM(3));
    m1.getMapping().addSleepingVM(m1.newVM(4), m1.newNode(2));
    m1.getMapping().addRunningVM(m1.newVM(5), m1.newNode(3));
    instances.add(new Instance(m0, new ArrayList<>(), new MinMTTR()));
    instances.add(new Instance(m1, new ArrayList<>(), new MinMTTR()));
    TIntIntHashMap index = Instances.makeVMIndex(instances);
    Set<VM> all = new HashSet<>(m0.getMapping().getAllVMs());
    all.addAll(m1.getMapping().getAllVMs());
    // Only VMs in m0
    Preserve single = new Preserve(v, "foo", 3);
    Assert.assertTrue(splitter.split(single, null, instances, index, new TIntIntHashMap()));
    Assert.assertTrue(instances.get(0).getSatConstraints().contains(single));
    Assert.assertFalse(instances.get(1).getSatConstraints().contains(single));
}
Use of gnu.trove.map.hash.TIntIntHashMap in project scheduler by btrplace.
The class RootSplitterTest, method simpleTest.
@Test
public void simpleTest() {
    RootSplitter splitter = new RootSplitter();
    List<Instance> instances = new ArrayList<>();
    Model m0 = new DefaultModel();
    VM v = m0.newVM(1);
    m0.getMapping().addReadyVM(v);
    m0.getMapping().addRunningVM(m0.newVM(2), m0.newNode(1));
    Model m1 = new DefaultModel();
    m1.getMapping().addReadyVM(m1.newVM(3));
    m1.getMapping().addSleepingVM(m1.newVM(4), m1.newNode(2));
    m1.getMapping().addRunningVM(m1.newVM(5), m1.newNode(3));
    instances.add(new Instance(m0, new ArrayList<>(), new MinMTTR()));
    instances.add(new Instance(m1, new ArrayList<>(), new MinMTTR()));
    Set<VM> all = new HashSet<>(m0.getMapping().getAllVMs());
    all.addAll(m1.getMapping().getAllVMs());
    TIntIntHashMap index = Instances.makeVMIndex(instances);
    // Only VMs in m0
    Root single = new Root(v);
    Assert.assertTrue(splitter.split(single, null, instances, index, new TIntIntHashMap()));
    Assert.assertTrue(instances.get(0).getSatConstraints().contains(single));
    Assert.assertFalse(instances.get(1).getSatConstraints().contains(single));
}
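All three splitter tests exercise the same dispatch idea: resolve the owning instance of the constrained VM through the TIntIntHashMap index, then attach the constraint to that instance only. The following is a simplified, hypothetical sketch of that pattern, not the actual splitter code from btrplace.

// Hypothetical sketch of the dispatch pattern checked by these tests.
// 'cstr' constrains a single VM; vmIndex maps VM ids to instance positions.
static boolean dispatch(Root cstr, List<Instance> instances, TIntIntHashMap vmIndex) {
    VM vm = cstr.getInvolvedVMs().iterator().next();
    int pos = vmIndex.get(vm.id());
    // Attach the constraint to the instance that owns the VM.
    return instances.get(pos).getSatConstraints().add(cstr);
}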
Use of gnu.trove.map.hash.TIntIntHashMap in project GDSC-SMLM by aherbert.
The class TcPalmAnalysis, method createCumulativeCountData.
/**
 * Creates the cumulative count data.
 *
 * @param clusters the clusters to analyse
 * @param createPlotData set to true to create the plot data arrays
 * @return the cumulative count data
 */
private CumulativeCountData createCumulativeCountData(LocalList<ClusterData> clusters, boolean createPlotData) {
    final TIntIntHashMap all = new TIntIntHashMap(maxT - minT + 1);
    clusters.forEach(c -> c.results.forEach(peak -> all.adjustOrPutValue(peak.getFrame(), 1, 1)));
    final int[] frames = all.keys();
    final int[] counts = all.values();
    SortUtils.sortData(counts, frames, true, false);
    return new CumulativeCountData(frames, counts, createPlotData);
}
Use of gnu.trove.map.hash.TIntIntHashMap in project GDSC-SMLM by aherbert.
The class ClassificationMatchCalculator, method getMapper.
/**
 * Gets the mapper that can create a value from a natural sequence starting from 0 for each unique
 * key in the results. If the analysis is set to ignore then a single mapping to zero is created.
 *
 * @param allMatches all the matches
 * @param fun the function to get the key value
 * @param analysis the type of analysis
 * @return the mapper
 */
private static Mapper getMapper(List<PointPair> allMatches, ToIntFunction<PeakResult> fun, ClassAnalysis analysis) {
    if (analysis == ClassAnalysis.IGNORE) {
        return Mapper.single();
    }
    // Find the unique values
    final TIntHashSet set = new TIntHashSet();
    for (final PointPair r : allMatches) {
        set.add(fun.applyAsInt(((PeakResultPoint) r.getPoint1()).getPeakResult()));
        set.add(fun.applyAsInt(((PeakResultPoint) r.getPoint2()).getPeakResult()));
    }
    // Edge case of 1 value
    if (set.size() == 1) {
        return Mapper.single();
    }
    // Map to a natural sequence from zero
    final int[] keys = set.toArray();
    Arrays.sort(keys);
    // Check if the keys already form a contiguous sequence
    if (keys[keys.length - 1] - keys[0] == set.size() - 1) {
        return Mapper.offset(set.size(), keys[0]);
    }
    // Map each key to a value starting from 0
    final TIntIntHashMap map = new TIntIntHashMap(keys.length);
    for (final int k : keys) {
        map.put(k, map.size());
    }
    return new Mapper() {
        @Override
        public int size() {
            return map.size();
        }

        @Override
        public int map(int key) {
            return map.get(key);
        }
    };
}
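The final branch compacts an arbitrary, possibly sparse set of keys into a dense 0-based range by inserting them in sorted order and using the current map size as the next value. A small illustration of that idiom with hypothetical keys:

// Hypothetical sparse class ids compacted to 0, 1, 2.
int[] keys = { 4, 9, 42 };                // already sorted
TIntIntHashMap map = new TIntIntHashMap(keys.length);
for (int k : keys) {
    map.put(k, map.size());               // 4 -> 0, 9 -> 1, 42 -> 2
}
// map.get(9) == 1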