use of boofcv.abst.tracker.PointTrack in project BoofCV by lessthanoptimal.
the class DetectDescribeAssociateTracker method dropTrackIndexInAll.
/**
* Given the index of the track in the `all` list, drop it from the tracker
*/
private PointTrack dropTrackIndexInAll(int indexInAll) {
	PointTrack track = tracksAll.removeSwap(indexInAll);
	// the track may or may not be in the active list
	boolean found = tracksActive.remove(track);
	found |= tracksInactive.remove(track);
	// if a track is being dropped it must be in the `all` list and in either the `active` or `inactive` list
	assert found;
	return track;
}
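Note that `removeSwap` removes an element by moving the last element into the target slot, so the order of `tracksAll` is not preserved and callers that drop by index must walk the list in reverse (as `dropTracks` below does). For readers unfamiliar with the idiom, here is a minimal plain-Java sketch of the same swap-remove behavior; it is illustrative only and not the DDogleg/BoofCV implementation.

import java.util.ArrayList;
import java.util.List;

public class SwapRemoveSketch {
	/** Removes the element at 'index' in O(1) by moving the last element into its slot. */
	static <T> T removeSwap( List<T> list, int index ) {
		T removed = list.get(index);
		int lastIdx = list.size() - 1;
		// move the last element into the vacated slot, then trim the tail
		list.set(index, list.get(lastIdx));
		list.remove(lastIdx);
		return removed;
	}

	public static void main( String[] args ) {
		List<String> tracks = new ArrayList<>(List.of("A", "B", "C", "D"));
		System.out.println(removeSwap(tracks, 1)); // prints B
		System.out.println(tracks);                // prints [A, D, C] -- element order is not preserved
	}
}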
use of boofcv.abst.tracker.PointTrack in project BoofCV by lessthanoptimal.
the class DetectDescribeAssociateTracker method addNewTrack.
/**
* Adds a new track given its location and description
*/
protected void addNewTrack(int set, double x, double y, TD desc) {
	PointTrack p = tracksAll.grow();
	p.pixel.setTo(x, y);
	((TD) p.getDescription()).setTo(desc);
	p.spawnFrameID = frameID;
	p.lastSeenFrameID = frameID;
	p.detectorSetId = set;
	p.featureId = featureID++;
	// if( tracksActive.contains(p))
	//     throw new RuntimeException("Contained twice active! p.id="+p.featureId);
	tracksNew.add(p);
	tracksActive.add(p);
}
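`addNewTrack` is a protected hook that the tracker invokes internally while spawning tracks; application code only sees its effect through the public `PointTracker` interface. The sketch below shows that outer workflow under the assumption that a configured `PointTracker<GrayU8>` (for example from one of the `FactoryPointTracker` methods) and an input image are already available.

import java.util.ArrayList;
import java.util.List;

import boofcv.abst.tracker.PointTrack;
import boofcv.abst.tracker.PointTracker;
import boofcv.struct.image.GrayU8;

public class TrackerSpawnSketch {
	/** Processes one frame and prints the tracks created by spawnTracks(). */
	static void processFrame( PointTracker<GrayU8> tracker, GrayU8 image ) {
		tracker.process(image);  // track existing features into the new frame
		tracker.spawnTracks();   // new detections end up in addNewTrack() internally

		List<PointTrack> spawned = tracker.getNewTracks(new ArrayList<>());
		for (PointTrack t : spawned) {
			System.out.printf("new track id=%d at (%.1f, %.1f)%n", t.featureId, t.pixel.x, t.pixel.y);
		}
	}
}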
use of boofcv.abst.tracker.PointTrack in project BoofCV by lessthanoptimal.
the class DetectDescribeAssociateTracker method dropTracks.
public void dropTracks(PointTracker.Dropper dropper) {
	for (int i = tracksAll.size() - 1; i >= 0; i--) {
		PointTrack track = tracksAll.get(i);
		if (!dropper.shouldDropTrack(track))
			continue;
		dropTrackIndexInAll(i);
	}
}
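Assuming `PointTracker.Dropper` declares only `shouldDropTrack`, the predicate can be supplied as a lambda. A brief sketch, where `tracker` is assumed to be this tracker seen through the `PointTracker` interface and the 10-frame age threshold is purely illustrative:

// drop every track that has not been seen within the last 10 frames (illustrative threshold)
long currentFrame = tracker.getFrameID();
tracker.dropTracks(track -> currentFrame - track.lastSeenFrameID > 10);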
use of boofcv.abst.tracker.PointTrack in project BoofCV by lessthanoptimal.
the class VisOdomMonoDepthPnP method estimateMotion.
/**
* Estimates motion from the set of tracks and their 3D location
*
* @return true if successful.
*/
private boolean estimateMotion(List<PointTrack> active) {
	Point2Transform2_F64 pixelToNorm = cameraModels.get(0).pixelToNorm;
	Objects.requireNonNull(framePrevious).frame_to_world.invert(world_to_prev);
	// Create a list of observations for PnP
	// normalized image coordinates and 3D in the previous keyframe's reference frame
	observationsPnP.reset();
	for (int activeIdx = 0; activeIdx < active.size(); activeIdx++) {
		PointTrack pt = active.get(activeIdx);
		// Build the list of tracks which are currently visible
		initialVisible.add((Track) pt.cookie);
		// Extract info needed to estimate motion
		Point2D3D p = observationsPnP.grow();
		pixelToNorm.compute(pt.pixel.x, pt.pixel.y, p.observation);
		Track bt = pt.getCookie();
		// Go from world coordinates to the previous frame
		SePointOps_F64.transform(world_to_prev, bt.worldLoc, prevLoc4);
		// Go from homogeneous coordinates into 3D coordinates
		PerspectiveOps.homogenousTo3dPositiveZ(prevLoc4, 1e8, 1e-7, p.location);
	}
	// estimate the motion up to a scale factor in translation
	if (!motionEstimator.process(observationsPnP.toList()))
		return false;
	Se3_F64 previous_to_current;
	if (refine != null) {
		previous_to_current = new Se3_F64();
		refine.fitModel(motionEstimator.getMatchSet(), motionEstimator.getModelParameters(), previous_to_current);
	} else {
		previous_to_current = motionEstimator.getModelParameters();
	}
	// Change everything back to the world frame
	previous_to_current.invert(current_to_previous);
	current_to_previous.concat(framePrevious.frame_to_world, frameCurrent.frame_to_world);
	return true;
}
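The last three statements are rigid-transform bookkeeping with georegression's `Se3_F64`: invert the estimated motion to get current-to-previous, then compose it with the previous keyframe's pose to get the current pose in the world frame. A minimal sketch of that composition pattern (the transform names are illustrative and start out as identity transforms):

import georegression.struct.se.Se3_F64;

public class FrameChainSketch {
	public static void main( String[] args ) {
		// illustrative transforms; in estimateMotion() these are the PnP motion estimate
		// and the previous keyframe's pose
		Se3_F64 previous_to_current = new Se3_F64();
		Se3_F64 previous_to_world = new Se3_F64();

		// invert the estimated motion: current -> previous
		Se3_F64 current_to_previous = new Se3_F64();
		previous_to_current.invert(current_to_previous);

		// compose (current -> previous) with (previous -> world) to get current -> world
		Se3_F64 current_to_world = new Se3_F64();
		current_to_previous.concat(previous_to_world, current_to_world);

		System.out.println("camera position in world frame: " + current_to_world.getT());
	}
}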
use of boofcv.abst.tracker.PointTrack in project BoofCV by lessthanoptimal.
the class VisOdomMonoDepthPnP method spawnNewTracksForNewKeyFrame.
/**
* Detects new features and computes their 3D coordinates
*
* @param visibleTracks newly spawned tracks are added to this list
*/
private void spawnNewTracksForNewKeyFrame(List<Track> visibleTracks) {
	// System.out.println("addNewTracks() current frame="+frameCurrent.id);
	long frameID = tracker.getFrameID();
	int totalRejected = 0;
	tracker.spawnTracks();
	List<PointTrack> spawned = tracker.getNewTracks(null);
	// estimate 3D coordinate using stereo vision
	for (PointTrack pt : spawned) {
		// discard the point if it can't be localized
		if (!pixelTo3D.process(pt.pixel.x, pt.pixel.y)) {
			// System.out.println("Dropped pixelTo3D tt="+pt.featureId);
			totalRejected++;
			tracker.dropTrack(pt);
		} else {
			if (bundleViso.findByTrackerTrack(pt) != null) {
				// System.out.println("BUG! Tracker recycled... bt="+btrack.id+" tt="+t.featureId);
				throw new RuntimeException("BUG! Recycled tracker track too early tt=" + pt.featureId);
			}
			// Save the track's 3D location and add it to the current frame
			Track btrack = bundleViso.addTrack(pixelTo3D.getX(), pixelTo3D.getY(), pixelTo3D.getZ(), pixelTo3D.getW());
			btrack.lastUsed = frameID;
			btrack.visualTrack = pt;
			btrack.id = pt.featureId;
			pt.cookie = btrack;
			// System.out.println("new track bt="+btrack.id+" tt.id="+t.featureId);
			// Convert the location from the local coordinate system to world coordinates
			SePointOps_F64.transform(frameCurrent.frame_to_world, btrack.worldLoc, btrack.worldLoc);
			// keep the scale of floats manageable and normalize the vector to have a norm of 1
			// Homogeneous coordinates, so the distance is determined by the ratio of w and the other elements
			btrack.worldLoc.normalize();
			bundleViso.addObservation(frameCurrent, btrack, pt.pixel.x, pt.pixel.y);
			// for (int i = 0; i < visibleTracks.size(); i++) {
			//     if( visibleTracks.get(i).visualTrack == t )
			//         throw new RuntimeException("Bug. Adding duplicate track: "+t.featureId);
			// }
			visibleTracks.add(btrack);
		}
	}
	if (verbose != null)
		verbose.printf("spawn: new=%d rejected=%d\n", spawned.size() - totalRejected, totalRejected);
}
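The call to `worldLoc.normalize()` is safe because the location is stored in homogeneous coordinates, which are only defined up to scale: dividing all four components by the same factor leaves the represented 3D point unchanged. A small illustrative sketch, assuming `worldLoc` is a georegression `Point4D_F64` (the numbers are arbitrary):

import georegression.struct.point.Point4D_F64;

public class HomogeneousScaleSketch {
	public static void main( String[] args ) {
		// homogeneous point (x, y, z, w); the 3D point it represents is (x/w, y/w, z/w)
		Point4D_F64 p = new Point4D_F64(2000, 4000, 8000, 2);
		System.out.printf("before: (%.1f, %.1f, %.1f)%n", p.x/p.w, p.y/p.w, p.z/p.w);

		// rescale the 4-vector to norm 1; the represented 3D point does not change
		p.normalize();
		System.out.printf("after:  (%.1f, %.1f, %.1f)%n", p.x/p.w, p.y/p.w, p.z/p.w);
	}
}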