Use of gnu.trove.map.hash.TIntIntHashMap in project scheduler by btrplace.
Class SleepingSplitterTest, method simpleTest:
@Test
public void simpleTest() {
    SleepingSplitter splitter = new SleepingSplitter();
    List<Instance> instances = new ArrayList<>();
    Model origin = new DefaultModel();
    Node n1 = origin.newNode();
    Node n2 = origin.newNode();
    VM vm1 = origin.newVM();
    VM vm2 = origin.newVM();
    VM vm3 = origin.newVM();
    VM vm4 = origin.newVM();
    /*
     * READY: vm1
     * n1 vm2
     * n2 (vm3) vm4
     */
    origin.getMapping().addOnlineNode(n1);
    origin.getMapping().addReadyVM(vm1);
    origin.getMapping().addRunningVM(vm2, n1);
    origin.getMapping().addOnlineNode(n2);
    origin.getMapping().addSleepingVM(vm3, n2);
    origin.getMapping().addRunningVM(vm4, n2);
    Model m0 = new DefaultModel();
    m0.newNode(n1.id());
    m0.newVM(vm1.id());
    m0.newVM(vm2.id());
    m0.getMapping().addOnlineNode(n1);
    m0.getMapping().addReadyVM(vm1);
    m0.getMapping().addRunningVM(vm2, n1);
    Model m1 = new DefaultModel();
    m1.newNode(n2.id());
    m1.newVM(vm3.id());
    m1.newVM(vm4.id());
    m1.getMapping().addOnlineNode(n2);
    m1.getMapping().addSleepingVM(vm3, n2);
    m1.getMapping().addRunningVM(vm4, n2);
    instances.add(new Instance(m0, new ArrayList<>(), new MinMTTR()));
    instances.add(new Instance(m1, new ArrayList<>(), new MinMTTR()));
    Set<VM> all = new HashSet<>(m0.getMapping().getAllVMs());
    all.addAll(m1.getMapping().getAllVMs());
    TIntIntHashMap index = Instances.makeVMIndex(instances);
    // Only VMs in m0
    Sleeping single = new Sleeping(vm2);
    Assert.assertTrue(splitter.split(single, null, instances, index, new TIntIntHashMap()));
    Assert.assertTrue(instances.get(0).getSatConstraints().contains(single));
    Assert.assertFalse(instances.get(1).getSatConstraints().contains(single));
}
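For context, a minimal standalone sketch (not btrplace code; the class, method, and variable names below are hypothetical) of the idea behind such a VM index: a TIntIntHashMap maps each VM identifier to the position of the sub-instance that owns it, and -1 is used as the no-entry value so an unindexed VM is distinguishable from partition 0.

import gnu.trove.map.hash.TIntIntHashMap;
import java.util.List;
import java.util.Set;

final class VMIndexSketch {

    // Build an index from VM id to the position of the partition holding that VM.
    static TIntIntHashMap indexVMs(List<Set<Integer>> vmIdsPerPartition) {
        // no_entry_value of -1 distinguishes "unknown VM" from partition 0
        TIntIntHashMap index = new TIntIntHashMap(16, 0.5f, -1, -1);
        for (int p = 0; p < vmIdsPerPartition.size(); p++) {
            for (int vmId : vmIdsPerPartition.get(p)) {
                index.put(vmId, p);
            }
        }
        return index;
    }

    // A constraint on a single VM can then be routed to exactly one partition.
    static int partitionOf(TIntIntHashMap index, int vmId) {
        return index.get(vmId); // -1 if the VM is not indexed
    }
}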
Use of gnu.trove.map.hash.TIntIntHashMap in project BoofCV by lessthanoptimal.
Class ProjectiveInitializeAllCommon, method lookupInfoForMetricElevation:
/**
 * Copies results into a format that's useful for projective to metric conversion
 *
 * @param viewIds (Output) ID of each view
 * @param views (Output) Shape of images in each view
 * @param cameraMatrices (Output) Found camera matrices. view[0] is skipped since it is identity
 * @param observations (Output) Found observations shifted to have (0,0) center
 */
public void lookupInfoForMetricElevation(List<String> viewIds, DogArray<ElevateViewInfo> views, DogArray<DMatrixRMaj> cameraMatrices, DogArray<AssociatedTupleDN> observations) {
    // Initialize all data structures to the correct size
    final int numViews = utils.structure.views.size;
    viewIds.clear();
    views.resize(numViews);
    cameraMatrices.resize(numViews - 1);
    observations.resize(inlierIndexes.get(0).size);
    TIntIntMap dbToCamera = new TIntIntHashMap() {
        {
            no_entry_value = -1;
        }
    };
    // pre-allocate memory
    for (int obsIdx = 0; obsIdx < observations.size; obsIdx++) {
        observations.get(obsIdx).resize(numViews);
    }
    // Copy results from bundle adjustment data structures
    for (int viewIdx = 0; viewIdx < numViews; viewIdx++) {
        SceneStructureProjective.View pview = utils.structure.views.get(viewIdx);
        if (viewIdx != 0)
            cameraMatrices.get(viewIdx - 1).setTo(pview.worldToView);
        else
            BoofMiscOps.checkTrue(MatrixFeatures_DDRM.isIdentity(pview.worldToView, 1e-8));
        String id = viewsByStructureIndex.get(viewIdx).id;
        viewIds.add(id);
        // See if this camera has already been assigned an index
        int cameraDB = utils.dbCams.viewToCamera(id);
        int cameraIdx = dbToCamera.get(cameraDB);
        if (cameraIdx == -1) {
            // Add this camera to the map since it's unknown
            cameraIdx = dbToCamera.size();
            dbToCamera.put(cameraDB, cameraIdx);
        }
        views.get(viewIdx).setTo(pview.width, pview.height, cameraIdx);
        SceneObservations.View oview = utils.observations.views.get(viewIdx);
        BoofMiscOps.checkTrue(oview.size() == observations.size);
        for (int obsIdx = 0; obsIdx < observations.size; obsIdx++) {
            int featureIdx = oview.getPointId(obsIdx);
            BoofMiscOps.checkTrue(featureIdx != -1, "Every feature should be visible in all views");
            oview.getPixel(obsIdx, observations.get(featureIdx).get(viewIdx));
        }
    }
}
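The anonymous subclass of TIntIntHashMap above sets no_entry_value to -1 so that dbToCamera.get() can report a missing key; the same effect can be obtained with Trove's four-argument constructor. Below is a small self-contained sketch of this "assign the next compact index on first sight" pattern. The class and the sample ids are illustrative only, not BoofCV API.

import gnu.trove.map.TIntIntMap;
import gnu.trove.map.hash.TIntIntHashMap;

final class CompactIndexSketch {
    public static void main(String[] args) {
        // -1 as the no-entry value makes "not yet assigned" detectable,
        // equivalent to the instance-initializer idiom used above.
        TIntIntMap dbToCompact = new TIntIntHashMap(16, 0.5f, -1, -1);

        int[] dbIds = {42, 7, 42, 13, 7};
        for (int dbId : dbIds) {
            int compact = dbToCompact.get(dbId);
            if (compact == -1) {
                compact = dbToCompact.size();   // next free compact index
                dbToCompact.put(dbId, compact);
            }
            System.out.println(dbId + " -> " + compact);
        }
        // prints 42 -> 0, 7 -> 1, 42 -> 0, 13 -> 2, 7 -> 1
    }
}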
Use of gnu.trove.map.hash.TIntIntHashMap in project gephi by gephi.
Class OpenOrdLayout, method initAlgo:
@Override
public void initAlgo() {
    // Verify param
    if (param.getIterationsSum() != 1f) {
        param = Params.DEFAULT;
        // throw new RuntimeException("The sum of the time for each stage must be equal to 1");
    }
    // Get graph
    graph = graphModel.getUndirectedGraphVisible();
    graph.readLock();
    boolean isDynamicWeight = graphModel.getEdgeTable().getColumn("weight").isDynamic();
    Interval interval = graph.getView().getTimeInterval();
    try {
        int numNodes = graph.getNodeCount();
        // Prepare data structure - nodes and neighbors map
        Node[] nodes = new Node[numNodes];
        TIntFloatHashMap[] neighbors = new TIntFloatHashMap[numNodes];
        // Load nodes and edges
        TIntIntHashMap idMap = new TIntIntHashMap(numNodes, 1f);
        org.gephi.graph.api.Node[] graphNodes = graph.getNodes().toArray();
        for (int i = 0; i < numNodes; i++) {
            org.gephi.graph.api.Node n = graphNodes[i];
            nodes[i] = new Node(i);
            nodes[i].x = n.x();
            nodes[i].y = n.y();
            nodes[i].fixed = n.isFixed();
            OpenOrdLayoutData layoutData = new OpenOrdLayoutData(i);
            n.setLayoutData(layoutData);
            idMap.put(n.getStoreId(), i);
        }
        float highestSimilarity = Float.NEGATIVE_INFINITY;
        for (Edge e : graph.getEdges()) {
            int source = idMap.get(e.getSource().getStoreId());
            int target = idMap.get(e.getTarget().getStoreId());
            if (source != target) {
                // No self-loop
                float weight = (float) (isDynamicWeight ? e.getWeight(interval) : e.getWeight());
                if (neighbors[source] == null) {
                    neighbors[source] = new TIntFloatHashMap();
                }
                if (neighbors[target] == null) {
                    neighbors[target] = new TIntFloatHashMap();
                }
                neighbors[source].put(target, weight);
                neighbors[target].put(source, weight);
                highestSimilarity = Math.max(highestSimilarity, weight);
            }
        }
        // Reset position
        boolean someFixed = false;
        for (Node n : nodes) {
            if (!n.fixed) {
                n.x = 0;
                n.y = 0;
            } else {
                someFixed = true;
            }
        }
        // Recenter fixed nodes and rescale to fit into grid
        if (someFixed) {
            float minX = Float.POSITIVE_INFINITY;
            float maxX = Float.NEGATIVE_INFINITY;
            float minY = Float.POSITIVE_INFINITY;
            float maxY = Float.NEGATIVE_INFINITY;
            for (Node n : nodes) {
                if (n.fixed) {
                    minX = Math.min(minX, n.x);
                    maxX = Math.max(maxX, n.x);
                    minY = Math.min(minY, n.y);
                    maxY = Math.max(maxY, n.y);
                }
            }
            float shiftX = minX + (maxX - minX) / 2f;
            float shiftY = minY + (maxY - minY) / 2f;
            float ratio = Math.min(DensityGrid.getViewSize() / (maxX - minX), DensityGrid.getViewSize() / (maxY - minY));
            ratio = Math.min(1f, ratio);
            for (Node n : nodes) {
                if (n.fixed) {
                    n.x = (float) (n.x - shiftX) * ratio;
                    n.y = (float) (n.y - shiftY) * ratio;
                }
            }
        }
        // Init control and workers
        control = new Control();
        combine = new Combine(this);
        barrier = new CyclicBarrier(numThreads, combine);
        control.setEdgeCut(edgeCut);
        control.setRealParm(realTime);
        control.setProgressTicket(progressTicket);
        control.initParams(param, numIterations);
        control.setNumNodes(numNodes);
        control.setHighestSimilarity(highestSimilarity);
        workers = new Worker[numThreads];
        for (int i = 0; i < numThreads; ++i) {
            workers[i] = new Worker(i, numThreads, barrier);
            workers[i].setRandom(new Random(randSeed));
            control.initWorker(workers[i]);
        }
        // Deep copy of a partition of all neighbors for each worker
        for (Worker w : workers) {
            Node[] nodesCopy = new Node[nodes.length];
            for (int i = 0; i < nodes.length; i++) {
                nodesCopy[i] = nodes[i].clone();
            }
            TIntFloatHashMap[] neighborsCopy = new TIntFloatHashMap[numNodes];
            for (int i = 0; i < neighbors.length; i++) {
                if (i % numThreads == w.getId() && neighbors[i] != null) {
                    int neighborsCount = neighbors[i].size();
                    neighborsCopy[i] = new TIntFloatHashMap(neighborsCount, 1f);
                    for (TIntFloatIterator itr = neighbors[i].iterator(); itr.hasNext(); ) {
                        itr.advance();
                        float weight = normalizeWeight(itr.value(), highestSimilarity);
                        neighborsCopy[i].put(itr.key(), weight);
                    }
                }
            }
            w.setPositions(nodesCopy);
            w.setNeighbors(neighborsCopy);
        }
        // Add fixed (real) nodes to each worker's density grid
        for (Node n : nodes) {
            if (n.fixed) {
                for (Worker w : workers) {
                    w.getDensityGrid().add(n, w.isFineDensity());
                }
            }
        }
        running = true;
        firstIteration = true;
    } finally {
        graph.readUnlockAll();
    }
}
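A stripped-down sketch of the remapping idiom used above: store ids are mapped to dense array indices with a TIntIntHashMap sized up front, and edges are then recorded in per-node TIntFloatHashMap adjacency maps keyed by those dense indices. The graph types are replaced with plain arrays here, and the ids, weights, and class name are illustrative only.

import gnu.trove.map.hash.TIntFloatHashMap;
import gnu.trove.map.hash.TIntIntHashMap;

final class IdRemapSketch {
    public static void main(String[] args) {
        int[] storeIds = {105, 211, 14};            // arbitrary, sparse node ids
        int[][] edges = {{105, 211}, {211, 14}};    // edges given as pairs of store ids
        float[] weights = {0.5f, 2.0f};

        // sized for a known number of keys; load factor 1f trades speed for memory
        TIntIntHashMap idMap = new TIntIntHashMap(storeIds.length, 1f);
        for (int i = 0; i < storeIds.length; i++) {
            idMap.put(storeIds[i], i);              // dense index used by the layout
        }

        // adjacency keyed by dense indices, mirroring the neighbors[] array above
        TIntFloatHashMap[] neighbors = new TIntFloatHashMap[storeIds.length];
        for (int e = 0; e < edges.length; e++) {
            int s = idMap.get(edges[e][0]);
            int t = idMap.get(edges[e][1]);
            if (s == t) continue;                   // skip self-loops
            if (neighbors[s] == null) neighbors[s] = new TIntFloatHashMap();
            if (neighbors[t] == null) neighbors[t] = new TIntFloatHashMap();
            neighbors[s].put(t, weights[e]);
            neighbors[t].put(s, weights[e]);
        }
        System.out.println("neighbors of dense node 0: " + neighbors[0].size());
    }
}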
Use of gnu.trove.map.hash.TIntIntHashMap in project OpenTripPlanner by opentripplanner.
Class RaptorWorker, method runRaptor:
/**
 * @param accessTimes a map from transit stops to the time it takes to reach those stops
 * @param nonTransitTimes the time to reach all targets without transit. Targets can be vertices or points/samples.
 */
public PropagatedTimesStore runRaptor(Graph graph, TIntIntMap accessTimes, int[] nonTransitTimes, TaskStatistics ts) {
    long beginCalcTime = System.currentTimeMillis();
    TIntIntMap initialStops = new TIntIntHashMap();
    TIntIntIterator initialIterator = accessTimes.iterator();
    while (initialIterator.hasNext()) {
        initialIterator.advance();
        int stopIndex = initialIterator.key();
        int accessTime = initialIterator.value();
        initialStops.put(stopIndex, accessTime);
    }
    PropagatedTimesStore propagatedTimesStore = new PropagatedTimesStore(graph, this.req, data.nTargets);
    // optimization: if no schedules, only run Monte Carlo
    int fromTime = req.fromTime;
    int monteCarloDraws = MONTE_CARLO_COUNT_PER_MINUTE;
    if (!data.hasSchedules) {
        // only do one iteration
        fromTime = req.toTime - 60;
        monteCarloDraws = TOTAL_MONTE_CARLO_COUNT;
    }
    // if no frequencies, don't run Monte Carlo
    int iterations = (req.toTime - fromTime - 60) / 60 + 1;
    // if we multiply when we're not doing monte carlo, we'll end up with too many iterations.
    if (data.hasFrequencies)
        // we add 2 because we do two "fake" draws where we do min or max instead of a monte carlo draw
        iterations *= (monteCarloDraws + 2);
    ts.searchCount = iterations;
    // Iterate backward through minutes (range-raptor) taking a snapshot of router state after each call
    int[][] timesAtTargetsEachIteration = new int[iterations][data.nTargets];
    // for each iteration, whether it is the result of a schedule or Monte Carlo search, or whether it is an extremum.
    // extrema are not included in averages.
    boolean[] includeIterationInAverages = new boolean[iterations];
    Arrays.fill(includeIterationInAverages, true);
    // TODO don't hardwire timestep below
    ts.timeStep = 60;
    // times at targets from scheduled search
    int[] scheduledTimesAtTargets = new int[data.nTargets];
    Arrays.fill(scheduledTimesAtTargets, UNREACHED);
    // current iteration
    int iteration = 0;
    // FIXME this should be changed to tolerate a zero-width time range
    for (int departureTime = req.toTime - 60, n = 0; departureTime >= fromTime; departureTime -= 60, n++) {
        if (n % 15 == 0) {
            LOG.info("minute {}", n);
        }
        // run the scheduled search
        this.runRaptorScheduled(initialStops, departureTime);
        this.doPropagation(bestNonTransferTimes, scheduledTimesAtTargets, departureTime);
        // use the walk-only (non-transit) time wherever it beats the scheduled transit time
        for (int i = 0; i < scheduledTimesAtTargets.length; i++) {
            if (nonTransitTimes[i] != UNREACHED && nonTransitTimes[i] + departureTime < scheduledTimesAtTargets[i])
                scheduledTimesAtTargets[i] = nonTransitTimes[i] + departureTime;
        }
        // run the frequency searches
        if (data.hasFrequencies) {
            for (int i = 0; i < monteCarloDraws + 2; i++) {
                // make copies for just this search. We need copies because we can't use dynamic
                // programming/range-raptor with randomized schedules
                int[] bestTimesCopy = Arrays.copyOf(bestTimes, bestTimes.length);
                int[] bestNonTransferTimesCopy = Arrays.copyOf(bestNonTransferTimes, bestNonTransferTimes.length);
                int[] previousPatternsCopy = Arrays.copyOf(previousPatterns, previousPatterns.length);
                // special cases: calculate the best and the worst cases as well
                // Note that this (intentionally) does not affect searches where the user has requested
                // an assumption other than RANDOM, or stops with transfer rules.
                RaptorWorkerTimetable.BoardingAssumption requestedBoardingAssumption = req.boardingAssumption;
                if (i == 0 && req.boardingAssumption == RaptorWorkerTimetable.BoardingAssumption.RANDOM) {
                    req.boardingAssumption = RaptorWorkerTimetable.BoardingAssumption.WORST_CASE;
                    // don't include extrema in averages
                    includeIterationInAverages[iteration] = false;
                } else if (i == 1 && req.boardingAssumption == RaptorWorkerTimetable.BoardingAssumption.RANDOM) {
                    req.boardingAssumption = RaptorWorkerTimetable.BoardingAssumption.BEST_CASE;
                    // don't include extrema in averages
                    includeIterationInAverages[iteration] = false;
                } else if (requestedBoardingAssumption == RaptorWorkerTimetable.BoardingAssumption.RANDOM)
                    // use a new Monte Carlo draw each time
                    // included in averages by default
                    offsets.randomize();
                this.runRaptorFrequency(departureTime, bestTimesCopy, bestNonTransferTimesCopy, previousPatternsCopy);
                req.boardingAssumption = requestedBoardingAssumption;
                // do propagation
                int[] frequencyTimesAtTargets = timesAtTargetsEachIteration[iteration++];
                System.arraycopy(scheduledTimesAtTargets, 0, frequencyTimesAtTargets, 0, scheduledTimesAtTargets.length);
                // updates timesAtTargetsEachIteration directly because it has a reference into the array.
                this.doPropagation(bestNonTransferTimesCopy, frequencyTimesAtTargets, departureTime);
                // convert to elapsed time
                for (int t = 0; t < frequencyTimesAtTargets.length; t++) {
                    if (frequencyTimesAtTargets[t] != UNREACHED)
                        frequencyTimesAtTargets[t] -= departureTime;
                }
            }
        } else {
            final int dt = departureTime;
            timesAtTargetsEachIteration[iteration++] = IntStream.of(scheduledTimesAtTargets).map(i -> i != UNREACHED ? i - dt : i).toArray();
        }
    }
    // iteration should be incremented past end of array by ++ in assignment above
    if (iteration != iterations)
        throw new IllegalStateException("Iterations did not completely fill output array");
    long calcTime = System.currentTimeMillis() - beginCalcTime;
    LOG.info("calc time {}sec", calcTime / 1000.0);
    LOG.info(" propagation {}sec", totalPropagationTime / 1000.0);
    LOG.info(" raptor {}sec", (calcTime - totalPropagationTime) / 1000.0);
    ts.propagation = (int) totalPropagationTime;
    ts.transitSearch = (int) (calcTime - totalPropagationTime);
    // dumpVariableByte(timesAtTargetsEachMinute);
    // we can use min_max here as we've also run it once with best case and worst case board,
    // so the best and worst cases are meaningful.
    propagatedTimesStore.setFromArray(timesAtTargetsEachIteration, includeIterationInAverages, PropagatedTimesStore.ConfidenceCalculationMethod.MIN_MAX);
    return propagatedTimesStore;
}
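The loop at the top of runRaptor copies the access-time map entry by entry with a TIntIntIterator. Below is a self-contained sketch of that traversal pattern; the "keep the fastest time per stop" check is an illustrative addition and not part of the method above, which copies entries verbatim.

import gnu.trove.iterator.TIntIntIterator;
import gnu.trove.map.TIntIntMap;
import gnu.trove.map.hash.TIntIntHashMap;

final class AccessTimeCopySketch {
    // Copy stop -> access-time entries, keeping only the fastest time per stop.
    static TIntIntMap copyBest(TIntIntMap accessTimes) {
        TIntIntMap initialStops = new TIntIntHashMap();
        for (TIntIntIterator it = accessTimes.iterator(); it.hasNext(); ) {
            it.advance();                  // move the iterator to the next entry
            int stopIndex = it.key();
            int accessTime = it.value();
            if (!initialStops.containsKey(stopIndex) || accessTime < initialStops.get(stopIndex)) {
                initialStops.put(stopIndex, accessTime);
            }
        }
        return initialStops;
    }
}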
Use of gnu.trove.map.hash.TIntIntHashMap in project OpenTripPlanner by opentripplanner.
Class RaptorWorkerData, method findStopsNear:
/**
 * Find stops reachable from a given SPT, including temporary stops. If useTimes is true, use times from the SPT; otherwise use distances.
 */
public TIntIntMap findStopsNear(ShortestPathTree spt, Graph graph, boolean useTimes, float walkSpeed) {
    TIntIntMap accessTimes = new TIntIntHashMap();
    for (TransitStop tstop : graph.index.stopVertexForStop.values()) {
        State s = spt.getState(tstop);
        if (s != null) {
            // note that when not using times, we derive the access time from the walk distance and
            // the walk speed rather than from the state's elapsed time; this matches what we do in
            // the stop tree cache.
            int stopIndex = indexForStop.get(tstop.getIndex());
            if (stopIndex != -1) {
                if (useTimes)
                    accessTimes.put(stopIndex, (int) s.getElapsedTimeSeconds());
                else
                    accessTimes.put(stopIndex, (int) (s.getWalkDistance() / walkSpeed));
            }
        }
    }
    // and handle the additional stops
    for (TObjectIntIterator<AddTripPattern.TemporaryStop> it = addedStops.iterator(); it.hasNext(); ) {
        it.advance();
        AddTripPattern.TemporaryStop tstop = it.key();
        if (tstop.sample == null) {
            continue;
        }
        double dist = Double.POSITIVE_INFINITY;
        if (tstop.sample.v0 != null) {
            State s0 = spt.getState(tstop.sample.v0);
            if (s0 != null) {
                dist = s0.getWalkDistance() + tstop.sample.d0;
            }
        }
        if (tstop.sample.v1 != null) {
            State s1 = spt.getState(tstop.sample.v1);
            if (s1 != null) {
                double d1 = s1.getWalkDistance() + tstop.sample.d1;
                dist = Double.isInfinite(dist) ? d1 : Math.min(d1, dist);
            }
        }
        if (Double.isInfinite(dist))
            continue;
        // NB using the index in the worker data not the index in the graph!
        accessTimes.put(it.value(), (int) (dist / walkSpeed));
    }
    return accessTimes;
}
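Note that the stopIndex != -1 test above only works if indexForStop was created with -1 as its no-entry value; Trove's default no-entry value for int values is 0, which is also a valid stop index. Below is a short sketch of such a construction, with hypothetical vertex and stop ids.

import gnu.trove.map.TIntIntMap;
import gnu.trove.map.hash.TIntIntHashMap;

final class StopIndexSketch {
    public static void main(String[] args) {
        // Map graph-wide vertex indices to compact RAPTOR stop indices.
        // no_entry_value = -1 so that "unknown vertex" is distinguishable from stop 0.
        TIntIntMap indexForStop = new TIntIntHashMap(16, 0.5f, -1, -1);
        indexForStop.put(9137, 0);   // hypothetical vertex index -> stop 0
        indexForStop.put(4821, 1);

        int stopIndex = indexForStop.get(12345); // vertex not in the map
        if (stopIndex != -1) {
            System.out.println("stop " + stopIndex);
        } else {
            System.out.println("vertex is not a transit stop");
        }
    }
}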