Use of org.opencv.core.Point in project FRC2018 by first95.
Class VisualGearLiftFinder, method computeHeadingToTarget.
// Take in an image and process it. Call this before querying the computed heading.
public void computeHeadingToTarget() {
    if (imageSource.isValid()) {
        // imageSource.grabFrame(curFrame);
        imageSource.grabFrameNoTimeout(curFrame);
        if (!curFrame.empty()) {
            pipeline.process(curFrame);
            lastHeadingDeterminationSucceeded = false;
            // Draw the output of the pipeline
            ArrayList<Rect> boxes = pipeline.filterContoursBbOutput();
            Imgproc.drawContours(curFrame, pipeline.filterContoursOutput(), -1, new Scalar(0, 0, 255));
            for (Rect bb : boxes) {
                Imgproc.rectangle(curFrame, bb.br(), bb.tl(), new Scalar(128, 255, 128));
                heightOfObjectInPixels = bb.height;
                distanceFromCamToTarget = (FOCAL_LENGTH / bb.height);
            }
            // If we only see one box, it's not enough information to locate the target
            if (boxes.size() >= 2) {
                // Sort from largest to smallest (in terms of bounding box area)
                boxes.sort(new Comparator<Rect>() {
                    @Override
                    public int compare(Rect o1, Rect o2) {
                        // Areas are small enough that subtraction works as a shortcut.
                        return (int) (o2.area() - o1.area());
                    }
                });
                // Assume the two largest boxes are the ones to use.
                // Find their boundaries.
                // start at the right edge
                double left_bound = curFrame.cols();
                // start at the left edge
                double right_bound = 0;
                // start at the bottom edge
                double top_bound = curFrame.rows();
                // start at the top edge
                double bottom_bound = 0;
                for (int i = 0; i < 2 && i < NUM_BOXES_TO_CONSIDER; ++i) {
                    Rect bb = boxes.get(i);
                    Imgproc.putText(curFrame, "" + i, bb.tl(), 0, 0.75, new Scalar(255, 255, 255));
                    left_bound = Math.min(left_bound, bb.tl().x);
                    right_bound = Math.max(right_bound, bb.br().x);
                    top_bound = Math.min(top_bound, bb.tl().y);
                    bottom_bound = Math.max(bottom_bound, bb.br().y);
                }
                Imgproc.rectangle(curFrame, new Point(left_bound, top_bound), new Point(right_bound, bottom_bound), new Scalar(255, 255, 255));
                // Confirm that the aspect ratio of the target is what we expect.
                double aspect_ratio = (right_bound - left_bound) / (bottom_bound - top_bound);
                if (aspect_ratio > TALLEST_ASPECT_RATIO && aspect_ratio < WIDEST_ASPECT_RATIO) {
                    // Finally, compute the heading
                    double target_center_in_image = (right_bound + left_bound) / 2.0;
                    double target_offset_from_center = target_center_in_image - (curFrame.cols() / 2.0);
                    Imgproc.line(curFrame, new Point(target_center_in_image, 0), new Point(target_center_in_image, curFrame.rows()), new Scalar(0, 255, 0));
                    SmartDashboard.putNumber("Target center (pixels)", target_offset_from_center);
                    lastDeterminedHeadingDegrees = target_offset_from_center * DEGREES_PER_PIXEL;
                    lastHeadingDeterminationSucceeded = true;
                    SmartDashboard.putString(REASON, "None.");
                } else {
                    SmartDashboard.putString(REASON, "Aspect ratio of " + aspect_ratio + " outside limits.");
                }
            } else {
                SmartDashboard.putString(REASON, "Insufficient contours passing filter");
            }
            curFrame.copyTo(outputFrame);
        } else {
            SmartDashboard.putString(REASON, "Sink not valid!");
        }
    }
}
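The final heading is just the horizontal pixel offset of the target center scaled by DEGREES_PER_PIXEL. A minimal sketch of that conversion, assuming the constant is derived from the camera's horizontal field of view and frame width; the FOV and width values below are placeholders, not constants taken from the FRC2018 project:

// Minimal sketch: converting a pixel offset to a heading in degrees.
// HORIZONTAL_FOV_DEGREES and IMAGE_WIDTH_PIXELS are assumed example values.
public class HeadingMath {
    private static final double HORIZONTAL_FOV_DEGREES = 61.0; // assumed camera FOV
    private static final double IMAGE_WIDTH_PIXELS = 320.0;    // assumed frame width
    private static final double DEGREES_PER_PIXEL = HORIZONTAL_FOV_DEGREES / IMAGE_WIDTH_PIXELS;

    /** Positive result means the target is to the right of the image center. */
    public static double headingDegrees(double targetCenterX) {
        double offsetFromCenter = targetCenterX - (IMAGE_WIDTH_PIXELS / 2.0);
        return offsetFromCenter * DEGREES_PER_PIXEL;
    }
}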
Use of org.opencv.core.Point in project kifu-recorder by leonardost.
Class RecordGameActivity, method processBoardCorners.
private void processBoardCorners() {
    Point[] cornerPoints = new Point[4];
    for (int i = 0; i < 4; i++) {
        Ponto ponto = boardCorners[i].getRealCornerPosition();
        cornerPoints[i] = new Point(ponto.x, ponto.y);
    }
    boardPositionInImage = new Mat(4, 1, CvType.CV_32FC2);
    boardPositionInImage.put(0, 0, cornerPoints[0].x, cornerPoints[0].y, cornerPoints[1].x, cornerPoints[1].y, cornerPoints[2].x, cornerPoints[2].y, cornerPoints[3].x, cornerPoints[3].y);
    boardContour = new MatOfPoint(cornerPoints);
}
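The 4x1 CV_32FC2 matrix built here is the usual input format for a perspective warp. A minimal sketch of how such a matrix is commonly turned into a top-down board image, assuming the destination corners are given in the same order as the source corners; this is an illustration, not the project's ImageUtils.generateOrthogonalBoardImage:

// Minimal sketch, not the project's implementation: warping the board to a square view.
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;

public class OrthogonalWarpSketch {
    public static Mat warpToSquare(Mat cameraImage, Mat boardPositionInImage, int outputSize) {
        // Destination corners of an outputSize x outputSize square, in the same
        // order as the source corners stored in boardPositionInImage.
        Mat destination = new Mat(4, 1, CvType.CV_32FC2);
        destination.put(0, 0,
                0, 0,
                outputSize, 0,
                outputSize, outputSize,
                0, outputSize);
        Mat transform = Imgproc.getPerspectiveTransform(boardPositionInImage, destination);
        Mat orthogonal = new Mat();
        Imgproc.warpPerspective(cameraImage, orthogonal, transform, new Size(outputSize, outputSize));
        return orthogonal;
    }
}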
Use of org.opencv.core.Point in project kifu-recorder by leonardost.
Class RecordGameActivity, method updateCornerPositions.
private void updateCornerPositions(Mat image) {
    Imgproc.cvtColor(image, image, Imgproc.COLOR_RGBA2BGR);
    Corner[] possibleNewCorners = new Corner[4];
    boolean wereAllCornersFound = true;
    for (int i = 0; i < 4; i++) {
        long start = System.currentTimeMillis();
        possibleNewCorners[i] = cornerDetector[i].detectCornerIn(image);
        long duration = System.currentTimeMillis() - start;
        if (possibleNewCorners[i] != null) {
            logger.addToLog(possibleNewCorners[i].toString());
        } else {
            logger.addToLog("Corner " + i + " was not found");
        }
        logger.addToLog("Time to process corner " + i + " = " + duration + "ms");
        if (possibleNewCorners[i] == null) {
            wereAllCornersFound = false;
        }
    }
    Mat orthogonalBoardImage = wereAllCornersFound ? ImageUtils.generateOrthogonalBoardImage(image, possibleNewCorners) : null;
    if (wereAllCornersFound && boardDetector.isBoardContainedIn(orthogonalBoardImage)) {
        logger.addToLog("Board is inside contour");
        int numberOfCornersThatMoved = getNumberOfCornersThatMoved(possibleNewCorners, boardCorners);
        logger.addToLog("Number of corners that moved: " + numberOfCornersThatMoved);
        int numberOfEmptyCornersThatMoved = getNumberOfEmptyCornersThatMoved(possibleNewCorners, boardCorners);
        logger.addToLog("Number of empty corners that moved: " + numberOfEmptyCornersThatMoved);
        double[] distanceToNewPoint = new double[4];
        for (int i = 0; i < 4; i++) {
            distanceToNewPoint[i] = possibleNewCorners[i].distanceTo(boardCorners[i]);
            logger.addToLog("Distance to old corner point " + (i + 1) + " = " + distanceToNewPoint[i]);
        }
        for (int i = 0; i < 4; i++) {
            if (numberOfCornersThatMoved < 4) {
                // Update the relative corner position of possible corners with stones
                if (possibleNewCorners[i].isStone) {
                    if (!boardCorners[i].isStone) {
                        possibleNewCorners[i].updateDisplacementVectorRelativeTo(boardCorners[i].position);
                    } else {
                        possibleNewCorners[i].updateDisplacementVectorRelativeTo(boardCorners[i].getRealCornerPosition());
                    }
                }
            } else if (possibleNewCorners[i].isStone) {
                // All corners moved together, so this is probably a board displacement and we
                // don't update the corners' relative positions to the real corners
                possibleNewCorners[i].displacementToRealCorner = boardCorners[i].displacementToRealCorner;
            }
        }
        Mat orthogonalBoardImage2 = ImageUtils.generateOrthogonalBoardImage(image, possibleNewCorners);
        double similarity = lastValidOrtogonalBoardImage != null ? fingerprintMatching.calculateSimilatiryBetween(lastValidOrtogonalBoardImage, orthogonalBoardImage2) : -1;
        logger.addToLog("Similarity between new orthogonal board image and last valid one = " + similarity);
        logger.setOrtogonalBoardImage2(orthogonalBoardImage2);
        if (logger.getFrameNumber() <= 3 || numberOfFramesWithDissimilarOrtogonalImages >= 5 || fingerprintMatching.areImagesSimilar(lastValidOrtogonalBoardImage, orthogonalBoardImage2)) {
            // This condition should be time based and not frame based
            if (numberOfFramesWithDissimilarOrtogonalImages >= 5) {
                logger.addToLog("Forcing orthogonal image to be similar");
            } else {
                logger.addToLog("New orthogonal board image is similar to last valid one");
            }
            for (int i = 0; i < 4; i++) {
                if (!possibleNewCorners[i].isStone) {
                    numberOfFramesWithoutStone[i]++;
                } else {
                    numberOfFramesWithoutStone[i] = 0;
                }
                if (!boardCorners[i].isStone && !possibleNewCorners[i].isStone && numberOfCornersThatMoved < 3 && numberOfEmptyCornersThatMoved == 1) {
                    // This means a single empty corner moved by itself, which is not possible. This addresses a wrong
                    // corner detection in frame 70 of sequence 16.
                    logger.addToLog("Corner " + i + " - This empty corner moved by itself");
                    continue;
                }
                if (!possibleNewCorners[i].isStone && boardCorners[i].isStone && possibleNewCorners[i].distanceTo(boardCorners[i].getRealCornerPosition()) > MOVEMENT_THRESHOLD && // This condition should be time based instead of frame based, something like 2 or 3 seconds or so
                        numberOfFramesWithoutStone[i] < 5) {
                    // If a corner was a stone and is not anymore, the new empty corner should match the real corner
                    // position that the stone was on. This addresses a wrong corner detection in frame 74 of sequence 14.
                    logger.addToLog("Corner " + i + " - This now empty corner is in a wrong position");
                    logger.addToLog("Number of frames without stone = " + numberOfFramesWithoutStone[i]);
                    continue;
                }
                boardCorners[i] = possibleNewCorners[i];
                cornerDetector[i].setCorner(possibleNewCorners[i]);
            }
            numberOfFramesWithDissimilarOrtogonalImages = 0;
            lastValidOrtogonalBoardImage = orthogonalBoardImage2.clone();
            logger.setLastValidOrtogonalBoardImage(lastValidOrtogonalBoardImage);
        } else {
            logger.addToLog("New orthogonal board image is NOT similar to last valid one");
            numberOfFramesWithDissimilarOrtogonalImages++;
        }
        processBoardCorners();
        state = STATE_RUNNING;
    } else {
        state = STATE_LOOKING_FOR_BOARD;
        logger.addToLog("Board is NOT inside contour");
        logger.addToLog("were all corners found = " + wereAllCornersFound);
    }
    logger.logCornerPositions(boardCorners);
}
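The decision logic above relies on helpers such as getNumberOfCornersThatMoved, which are not included in this excerpt. A plausible minimal sketch of that helper inside RecordGameActivity, assuming it counts corners whose detected position moved farther than the MOVEMENT_THRESHOLD constant referenced above; the project's real implementation may differ:

// Hypothetical sketch of a helper referenced above; not taken verbatim from kifu-recorder.
private int getNumberOfCornersThatMoved(Corner[] possibleNewCorners, Corner[] boardCorners) {
    int numberOfCornersThatMoved = 0;
    for (int i = 0; i < 4; i++) {
        // distanceTo() is the same accessor used in updateCornerPositions above.
        if (possibleNewCorners[i].distanceTo(boardCorners[i]) > MOVEMENT_THRESHOLD) {
            numberOfCornersThatMoved++;
        }
    }
    return numberOfCornersThatMoved;
}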
Use of org.opencv.core.Point in project kifu-recorder by leonardost.
Class Drawer, method drawBoard.
/**
 * Draws the board over matrix 'image', with its origin at coordinates 'x' and 'y' and
 * with size 'imageSize'. The drawing respects the dimension of the board, that is, a
 * board with a smaller dimension produces a smaller preview. The last move is marked
 * in blue; if the 'lastMove' parameter is null, the last move is not marked.
 *
 * @param image
 * @param board
 * @param x
 * @param y
 * @param imageSize
 * @param lastMove
 */
public static void drawBoard(Mat image, Board board, int x, int y, int imageSize, Move lastMove) {
    Point p1 = new Point();
    Point p2 = new Point();
    double distanceBetweenLines = imageSize / (board.getDimension() + 1);
    double endOfLines = imageSize - distanceBetweenLines;
    // was using imageSize / 20 for 9x9 board
    int stoneRadius = 29 - board.getDimension();
    p1.x = x;
    p1.y = y;
    p2.x = x + imageSize;
    p2.y = y + imageSize;
    Imgproc.rectangle(image, p1, p2, mBoardBrown, -1);
    // Draw horizontal lines
    for (int i = 0; i < board.getDimension(); ++i) {
        Point start = new Point();
        Point end = new Point();
        start.x = x + distanceBetweenLines;
        start.y = y + distanceBetweenLines + distanceBetweenLines * i;
        end.x = x + endOfLines;
        end.y = start.y;
        Imgproc.line(image, start, end, mBlack);
    }
    // Draw vertical lines
    for (int i = 0; i < board.getDimension(); ++i) {
        Point start = new Point();
        Point end = new Point();
        start.x = x + distanceBetweenLines + distanceBetweenLines * i;
        start.y = y + distanceBetweenLines;
        end.x = start.x;
        end.y = y + endOfLines;
        Imgproc.line(image, start, end, mBlack);
    }
    // Draw stones
    for (int i = 0; i < board.getDimension(); ++i) {
        for (int j = 0; j < board.getDimension(); ++j) {
            Point center = new Point();
            center.x = x + distanceBetweenLines + j * distanceBetweenLines;
            center.y = y + distanceBetweenLines + i * distanceBetweenLines;
            if (board.getPosition(i, j) == Board.BLACK_STONE) {
                Imgproc.circle(image, center, stoneRadius, mBlack, -1);
            } else if (board.getPosition(i, j) == Board.WHITE_STONE) {
                Imgproc.circle(image, center, stoneRadius, mWhite, -1);
                Imgproc.circle(image, center, stoneRadius, mBlack);
            }
        }
    }
    // Mark the last move
    if (lastMove != null) {
        Point center = new Point();
        center.x = x + distanceBetweenLines + lastMove.column * distanceBetweenLines;
        center.y = y + distanceBetweenLines + lastMove.row * distanceBetweenLines;
        Scalar markColor = lastMove.color == Board.BLACK_STONE ? mWhite : mBlack;
        Imgproc.circle(image, center, (int) (stoneRadius * 0.6), markColor, 1);
        Imgproc.circle(image, center, stoneRadius, mBlue, -1);
    }
}
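All of the line, stone, and marker positions above come from the same intersection-to-pixel mapping. A small standalone illustration of that mapping follows; the class and method names are made up for this sketch, and it uses floating-point spacing rather than drawBoard's integer division:

// Illustrative helper, not part of the kifu-recorder Drawer class: maps a board
// intersection (row, column) to its pixel center inside the drawn preview.
import org.opencv.core.Point;

public class BoardGeometrySketch {
    public static Point intersectionCenter(int x, int y, int imageSize, int boardDimension,
                                           int row, int column) {
        // One extra gap on each side leaves a margin around the grid.
        double distanceBetweenLines = (double) imageSize / (boardDimension + 1);
        double centerX = x + distanceBetweenLines + column * distanceBetweenLines;
        double centerY = y + distanceBetweenLines + row * distanceBetweenLines;
        return new Point(centerX, centerY);
    }
}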
Use of org.opencv.core.Point in project kifu-recorder by leonardost.
Class CornerDetector, method detectCornerIn.
public Corner detectCornerIn(Mat image) {
    System.out.println("Processing corner " + cornerIndex);
    Mat regionImage = getRegionOfInterestAround(corner, image);
    // Imgcodecs.imwrite("processing/corner" + cornerIndex + "_frame" + imageIndex + ".png", regionImage);
    List<Corner> candidateCornerHarris = harrisCornerDetector.detectCandidateCornersIn(regionImage);
    List<Corner> candidateCornerEllipsis = ellipseCornerDetector.detectCandidateCornersIn(regionImage);
    // This is done to try to remove corner candidates that appear on the edge of circles
    for (Iterator<Corner> it = candidateCornerHarris.iterator(); it.hasNext(); ) {
        Corner corner = it.next();
        for (Corner circlePoint : candidateCornerEllipsis) {
            if (circlePoint.isTooCloseToCircle(corner.position)) {
                it.remove();
                break;
            }
        }
    }
    // Plot the candidate corners on the image
    Mat imageWithCornersPlotted = regionImage.clone();
    for (Corner point : candidateCornerEllipsis) {
        System.out.println("Candidate corner found by circle detection in image " + imageIndex + ": ");
        System.out.println(point);
        Imgproc.circle(imageWithCornersPlotted, new Point(point.getX(), point.getY()), 1, new Scalar(0, 255, 0), -1);
        Imgproc.ellipse(imageWithCornersPlotted, point.stonePosition, new Scalar(0, 255, 255));
    }
    for (Corner point : candidateCornerHarris) {
        System.out.println("Candidate corner found by Harris corner detection in frame " + imageIndex + ": ");
        System.out.println(point);
        Imgproc.circle(imageWithCornersPlotted, new Point(point.getX(), point.getY()), 1, new Scalar(0, 0, 255), -1);
    }
    // fileHelper.writePngImage(imageWithCornersPlotted, "corner" + cornerIndex + "_frame" + imageIndex + "_candidate_corners");
    List<Corner> candidateCorners = new ArrayList<>();
    candidateCorners.addAll(candidateCornerHarris);
    candidateCorners.addAll(candidateCornerEllipsis);
    // filterCandidateCornersWhichFallOutsideROI(candidateCorners);
    // A corner should have at most 4 candidates, whether Harris corners or ellipse corners.
    // More than that probably means something is wrong in the detection, or that there is
    // something else in the scene, such as a player's hand.
    // if (candidateCorners.size() > 4) return null;
    Corner candidateCorner = getCandidateNearestToCenterOfRegionOfInterest(candidateCorners);
    if (candidateCorner != null) {
        Ponto upperLeftCornerOfRegionOfInterest = corner.position.add(new Ponto(-RADIUS_OF_REGION_OF_INTEREST, -RADIUS_OF_REGION_OF_INTEREST));
        Ponto newCornerPosition = candidateCorner.position.add(upperLeftCornerOfRegionOfInterest);
        return new Corner(newCornerPosition.x, newCornerPosition.y, candidateCorner.isStone);
    }
    return null;
}
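getCandidateNearestToCenterOfRegionOfInterest is not included in this excerpt. A plausible sketch of it inside CornerDetector, assuming candidate positions are expressed in region-of-interest coordinates with the expected corner at (RADIUS_OF_REGION_OF_INTEREST, RADIUS_OF_REGION_OF_INTEREST); the project's actual implementation may differ:

// Hypothetical sketch of the helper referenced above; not taken from kifu-recorder.
private Corner getCandidateNearestToCenterOfRegionOfInterest(List<Corner> candidateCorners) {
    Corner nearest = null;
    double smallestDistance = Double.MAX_VALUE;
    for (Corner candidate : candidateCorners) {
        // Candidate coordinates are relative to the region of interest, whose center
        // sits at (RADIUS_OF_REGION_OF_INTEREST, RADIUS_OF_REGION_OF_INTEREST).
        double dx = candidate.getX() - RADIUS_OF_REGION_OF_INTEREST;
        double dy = candidate.getY() - RADIUS_OF_REGION_OF_INTEREST;
        double distance = Math.sqrt(dx * dx + dy * dy);
        if (distance < smallestDistance) {
            smallestDistance = distance;
            nearest = candidate;
        }
    }
    return nearest;
}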