Working red detection

Scott Barnes 2023-10-14 16:27:44 -05:00
parent f3a5a54f67
commit 49d4939366
10 changed files with 670 additions and 0 deletions

View File: Sandbox.java

@@ -0,0 +1,28 @@
package opmodes;
import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import org.firstinspires.ftc.teamcode.hardware.Camera;
/*
 * Sandbox OpMode for experimenting with camera-based red goal detection
 */
@TeleOp(name = "Sandbox", group = "Experimental")
public class Sandbox extends OpMode {
private Camera camera;
@Override
public void init() {
telemetry.addData("Status", "Initialized");
this.camera = new Camera(this.hardwareMap);
this.camera.initTargetingCamera();
}
@Override
public void loop() {
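// Hypothetical sketch (not part of the original commit): report the red goal
// detection on telemetry each loop, using the Camera and Detection methods
// added elsewhere in this change.
telemetry.addData("Red detected", camera.getRed().isValid());
telemetry.addData("Red center", camera.getRed().getCenter());
telemetry.addData("Red area", camera.getRed().getArea());
telemetry.update();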
}
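// Hypothetical sketch (not part of the original commit): release the webcam when
// the OpMode stops, using the stopTargetingCamera() method added in Camera.
@Override
public void stop() {
this.camera.stopTargetingCamera();
}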
}

View File: Camera.java

@@ -0,0 +1,60 @@
package org.firstinspires.ftc.teamcode.hardware;
import static org.firstinspires.ftc.teamcode.util.Constants.INVALID_DETECTION;
import static org.firstinspires.ftc.teamcode.util.Constants.TARGETING_WEBCAM;
import static org.firstinspires.ftc.teamcode.util.Constants.WEBCAM_HEIGHT;
import static org.firstinspires.ftc.teamcode.util.Constants.WEBCAM_ROTATION;
import static org.firstinspires.ftc.teamcode.util.Constants.WEBCAM_WIDTH;
import com.qualcomm.robotcore.hardware.HardwareMap;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.teamcode.vision.Detection;
import org.firstinspires.ftc.teamcode.vision.TargetingPipeline;
import org.openftc.easyopencv.OpenCvCamera;
import org.openftc.easyopencv.OpenCvCameraFactory;
// Class for the camera
public class Camera {
private HardwareMap hardwareMap;
private OpenCvCamera targetingCamera;
private TargetingPipeline targetingPipeline;
private boolean targetingCameraInitialized;
// Constructor
public Camera(HardwareMap hardwareMap) {
this.hardwareMap = hardwareMap;
}
// Initialize the targeting camera and start streaming asynchronously
public void initTargetingCamera() {
int targetingCameraMonitorViewId = hardwareMap.appContext.getResources().getIdentifier("cameraMonitorViewId", "id", hardwareMap.appContext.getPackageName());
this.targetingCamera = OpenCvCameraFactory.getInstance().createWebcam(hardwareMap.get(WebcamName.class, TARGETING_WEBCAM), targetingCameraMonitorViewId);
this.targetingPipeline = new TargetingPipeline();
targetingCamera.setPipeline(targetingPipeline);
targetingCamera.openCameraDeviceAsync(new OpenCvCamera.AsyncCameraOpenListener() {
@Override
public void onOpened() {
targetingCamera.startStreaming(WEBCAM_WIDTH, WEBCAM_HEIGHT, WEBCAM_ROTATION);
targetingCameraInitialized = true;
}
@Override
public void onError(int errorCode) {
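// camera failed to open; targetingCameraInitialized stays false, so getRed() keeps returning INVALID_DETECTION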
}
});
}
// Close the Targeting Camera
public void stopTargetingCamera() {
if (targetingCameraInitialized) {
targetingCamera.closeCameraDeviceAsync(() -> targetingCameraInitialized = false);
}
}
// Get the Red Goal Detection
public Detection getRed() {
return (targetingCameraInitialized ? targetingPipeline.getRed() : INVALID_DETECTION);
}
}

View File: Drive.java

@@ -0,0 +1,28 @@
package org.firstinspires.ftc.teamcode.hardware;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.hardware.Gamepad;
import com.qualcomm.robotcore.hardware.HardwareMap;
public class Drive {
private DcMotor frontLeft;
private DcMotor frontRight;
private DcMotor backLeft;
private DcMotor backRight;
public Drive(HardwareMap hardwareMap) {
this.init(hardwareMap);
}
private void init(HardwareMap hardwareMap) {
this.frontLeft = hardwareMap.get(DcMotor.class, "frontLeft");
this.frontRight = hardwareMap.get(DcMotor.class, "frontRight");
this.backLeft = hardwareMap.get(DcMotor.class, "backLeft");
this.backRight = hardwareMap.get(DcMotor.class, "backRight");
}
public void setInput(Gamepad gamepad1, Gamepad gamepad2) {
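// Hypothetical sketch (not part of the original commit): standard mecanum mixing
// from the driver's sticks. Assumes a mecanum drivetrain with motor directions
// configured so positive power drives each wheel forward; gamepad2 is unused here.
double drive = -gamepad1.left_stick_y;
double strafe = gamepad1.left_stick_x;
double turn = gamepad1.right_stick_x;
double denominator = Math.max(Math.abs(drive) + Math.abs(strafe) + Math.abs(turn), 1);
this.frontLeft.setPower((drive + strafe + turn) / denominator);
this.backLeft.setPower((drive - strafe + turn) / denominator);
this.frontRight.setPower((drive - strafe - turn) / denominator);
this.backRight.setPower((drive + strafe - turn) / denominator);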
}
}

View File: Robot.java

@@ -0,0 +1,20 @@
package org.firstinspires.ftc.teamcode.hardware;
import com.qualcomm.robotcore.hardware.HardwareMap;
public class Robot {
private Drive drive;
public Robot(HardwareMap hardwareMap) {
this.init(hardwareMap);
}
private void init(HardwareMap hardwareMap) {
this.drive = new Drive(hardwareMap);
}
public Drive getDrive() {
return this.drive;
}
}

View File: Color.java

@@ -0,0 +1,30 @@
package org.firstinspires.ftc.teamcode.util;
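// Simple container for an HSV color value, as used with the OpenCV HSV ranges elsewhere in this commit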
public class Color {
public double h;
public double s;
public double v;
public Color(double h, double s, double v) {
this.h = h;
this.s = s;
this.v = v;
}
public double[] get() {
return new double[]{h, s, v};
}
public double getH() {
return h;
}
public double getS() {
return s;
}
public double getV() {
return v;
}
}

View File: Configurables.java

@@ -0,0 +1,79 @@
package org.firstinspires.ftc.teamcode.util;
import com.qualcomm.robotcore.hardware.PIDFCoefficients;
import org.opencv.core.Point;
import org.opencv.core.Rect;
import org.opencv.core.Size;
public class Configurables {
// Robot Constants
public static double R_ARM_POWER = 0.2;
public static double R_ARM_SPEED = 20;
public static int R_ARM_DEFAULT_POS = 0;
public static int R_ARM_UP_POS = 221;
public static int R_ARM_ALMOST_DOWN_POS = 650;
public static int R_ARM_DOWN_POS = 750;
public static double R_CLAW_CLOSED = 0.13;
public static double R_CLAW_OPEN = 0.7;
public static double R_INTAKE_SPEED = 0.9;
public static double R_INTAKE_SHIELD_UP = 0.17;//0.05
public static double R_INTAKE_SHIELD_DOWN = 0.68;//0.95
public static double R_INTAKE_SHIELD_SPEED = 0.04;
public static double R_SHOOTER_GOAL_POWER = 0.66;
public static double R_SHOOTER_MID_GOAL_POWER = 0.54;
public static double R_SHOOTER_POWERSHOT_POWER = 0.57;
public static double R_PUSHER_CLOSED = 0.35;
public static double R_PUSHER_OPEN = 0.55;
public static double R_PUSHER_DELAY = 0.15;
// Auto Aim Constants
public static double AUTO_AIM_OFFSET_X = 5;
public static double AUTO_AIM_WAIT = 0.2;
public static PIDFCoefficients AUTO_AIM_PID = new PIDFCoefficients(0.009, 0.3, 0.0019, 0);
public static double AUTO_AIM_ACCEPTABLE_ERROR = 2;
public static double AUTO_AIM_MIN_POWER = 0.14;
// CV Color Threshold Constants
public static Color CAMERA_RED_GOAL_LOWER = new Color(165, 80, 80);
public static Color CAMERA_RED_GOAL_UPPER = new Color(15, 255, 255);
public static Color CAMERA_RED_POWERSHOT_LOWER = new Color(165, 80, 80);
public static Color CAMERA_RED_POWERSHOT_UPPER = new Color(15, 255, 255);
public static Color CAMERA_BLUE_GOAL_LOWER = new Color(75, 40, 80);
public static Color CAMERA_BLUE_GOAL_UPPER = new Color(120, 255, 255);
public static Color CAMERA_BLUE_POWERSHOT_LOWER = new Color(75, 30, 50);
public static Color CAMERA_BLUE_POWERSHOT_UPPER = new Color(120, 255, 255);
public static Color CAMERA_ORANGE_LOWER = new Color(0, 70, 50);
public static Color CAMERA_ORANGE_UPPER = new Color(60, 255, 255);
public static Color CAMERA_WHITE_LOWER = new Color(0, 0, 40);
public static Color CAMERA_WHITE_UPPER = new Color(180, 30, 255);
// CV Detection Constants
public static double CV_MIN_STARTERSTACK_AREA = 0;
public static double CV_MIN_STARTERSTACK_SINGLE_AREA = 0.08;
public static double CV_MIN_STARTERSTACK_QUAD_AREA = 1.3;
public static double CV_MIN_GOAL_AREA = 0;
public static double CV_MAX_GOAL_AREA = 0.3;
public static double CV_MIN_POWERSHOT_AREA = 0.001;
public static double CV_MAX_POWERSHOT_AREA = 0.05;
public static Rect CV_STARTERSTACK_LOCATION = new Rect(75, 50, 190, 90);
public static Point CV_POWERSHOT_OFFSET = new Point(-3, -20); // offset from the bottom left of the goal to the top right of the powershot box (for red)
public static Size CV_POWERSHOT_DIMENSIONS = new Size(100, 50);
public static Size CV_GOAL_PROPER_ASPECT = new Size(11, 8.5);
public static double CV_GOAL_PROPER_AREA = 1.25;
public static double CV_GOAL_ALLOWABLE_AREA_ERROR = 1;
public static double CV_GOAL_ALLOWABLE_SOLIDARITY_ERROR = 1;
public static double CV_GOAL_CUTOFF_Y_LINE = 65;
public static double CV_GOAL_PROPER_HEIGHT = 107;
public static double CV_GOAL_MIN_CONFIDENCE = 3;
public static Color CV_POWERSHOT_OFFSETS_RED = new Color(-40, -30, -19);
public static Color CV_POWERSHOT_OFFSETS_BLUE = new Color(40, 30, 19);
// Old CV Detection Constants
public static double CV_GOAL_SIDE_ALLOWABLE_Y_ERROR = 20;
public static double CV_GOAL_SIDE_ALLOWABLE_SIZE_ERROR = 100;
public static Size CV_GOAL_SIDE_ASPECT_RATIO = new Size(6.5,15.5);
public static double CV_GOAL_SIDE_ALLOWABLE_ASPECT_ERROR = 10;
}

View File: Constants.java

@@ -0,0 +1,54 @@
package org.firstinspires.ftc.teamcode.util;
import org.firstinspires.ftc.teamcode.vision.Detection;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import org.openftc.easyopencv.OpenCvCameraRotation;
public class Constants {
// CV Color Constants
public static Scalar RED = new Scalar(255, 0, 0);
public static Scalar GREEN = new Scalar(0, 255, 0);
public static Scalar BLUE = new Scalar(0, 0, 255);
public static Scalar WHITE = new Scalar(255, 255, 255);
public static Scalar GRAY = new Scalar(80, 80, 80);
public static Scalar BLACK = new Scalar(0, 0, 0);
public static Scalar ORANGE = new Scalar(255, 165, 0);
public static Scalar YELLOW = new Scalar(255, 255, 0);
public static Scalar PURPLE = new Scalar(128, 0, 128);
// CV Structuring Constants
public static final Mat STRUCTURING_ELEMENT = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(5, 5));
public static final Point ANCHOR = new Point(STRUCTURING_ELEMENT.cols() / 2f, STRUCTURING_ELEMENT.rows() / 2f);
public static final int ERODE_DILATE_ITERATIONS = 2;
public static final Size BLUR_SIZE = new Size(7, 7);
// CV Camera Constants
public static final int WEBCAM_WIDTH = 320;
public static final int WEBCAM_HEIGHT = 240;
public static final OpenCvCameraRotation WEBCAM_ROTATION = OpenCvCameraRotation.UPRIGHT;
// CV Invalid Detection Constants
public static final Point INVALID_POINT = new Point(Double.MIN_VALUE, Double.MIN_VALUE);
public static final double INVALID_AREA = -1;
public static final Detection INVALID_DETECTION = new Detection(new Size(0, 0), 0);
// Hardware Name Constants
public static final String WHEEL_FRONT_LEFT = "frontLeft";
public static final String WHEEL_FRONT_RIGHT = "frontRight";
public static final String WHEEL_BACK_LEFT = "backLeft";
public static final String WHEEL_BACK_RIGHT = "backRight";
public static final String ARM = "wobbler";
public static final String CLAW = "claw";
public static final String INTAKE = "intake";
public static final String INTAKE_SECONDARY = "secondary";
public static final String INTAKE_SHIELD = "shield";
public static final String SHOOTER = "wheel";
public static final String PUSHER = "pusher";
public static final String STACK_WEBCAM = "Stack Webcam";
public static final String TARGETING_WEBCAM = "Targeting Webcam";
public static final String IMU_SENSOR = "imu";
}

View File: OpenCVUtil.java

@@ -0,0 +1,153 @@
package org.firstinspires.ftc.teamcode.util;
import org.opencv.core.Mat;
import org.opencv.core.MatOfInt;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Point;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;
import org.opencv.imgproc.Moments;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import static org.firstinspires.ftc.teamcode.util.Configurables.CAMERA_BLUE_GOAL_LOWER;
import static org.firstinspires.ftc.teamcode.util.Configurables.CAMERA_BLUE_GOAL_UPPER;
import static org.firstinspires.ftc.teamcode.util.Configurables.CAMERA_RED_GOAL_LOWER;
import static org.firstinspires.ftc.teamcode.util.Configurables.CAMERA_RED_GOAL_UPPER;
import static org.firstinspires.ftc.teamcode.util.Configurables.CV_GOAL_ALLOWABLE_AREA_ERROR;
import static org.firstinspires.ftc.teamcode.util.Configurables.CV_GOAL_SIDE_ALLOWABLE_ASPECT_ERROR;
import static org.firstinspires.ftc.teamcode.util.Configurables.CV_GOAL_SIDE_ALLOWABLE_SIZE_ERROR;
import static org.firstinspires.ftc.teamcode.util.Configurables.CV_GOAL_ALLOWABLE_SOLIDARITY_ERROR;
import static org.firstinspires.ftc.teamcode.util.Configurables.CV_GOAL_SIDE_ALLOWABLE_Y_ERROR;
import static org.firstinspires.ftc.teamcode.util.Configurables.CV_GOAL_SIDE_ASPECT_RATIO;
import static org.firstinspires.ftc.teamcode.util.Configurables.CV_GOAL_MIN_CONFIDENCE;
import static org.firstinspires.ftc.teamcode.util.Configurables.CV_GOAL_PROPER_AREA;
import static org.firstinspires.ftc.teamcode.util.Configurables.CV_GOAL_PROPER_ASPECT;
import static org.firstinspires.ftc.teamcode.util.Configurables.CV_GOAL_PROPER_HEIGHT;
import static org.firstinspires.ftc.teamcode.util.Configurables.CV_MAX_GOAL_AREA;
import static org.firstinspires.ftc.teamcode.util.Configurables.CV_MIN_GOAL_AREA;
// CV Helper Functions
public class OpenCVUtil {
public static String telem = "nothing";
// Draw a point
public static void drawPoint(Mat img, Point point, Scalar color) {
Imgproc.circle(img, point, 3, color, -1);
}
// Get the center of a contour
public static Point getCenterOfContour(MatOfPoint contour) {
Moments moments = Imgproc.moments(contour);
return new Point(moments.m10 / moments.m00, moments.m01 / moments.m00);
}
// Get the bottom left of a contour
public static Point getBottomLeftOfContour(MatOfPoint contour) {
Rect boundingRect = Imgproc.boundingRect(contour);
return new Point(boundingRect.x, boundingRect.y+boundingRect.height);
}
// Get the bottom right of a contour
public static Point getBottomRightOfContour(MatOfPoint contour) {
Rect boundingRect = Imgproc.boundingRect(contour);
return new Point(boundingRect.x+boundingRect.width, boundingRect.y+boundingRect.height);
}
// Draw a contour
public static void drawContour(Mat img, MatOfPoint contour, Scalar color) {
Imgproc.drawContours(img, Collections.singletonList(contour), 0, color, 2);
}
// Draw a convex hull around a contour
public static void drawConvexHull(Mat img, MatOfPoint contour, Scalar color) {
MatOfInt hull = new MatOfInt();
Imgproc.convexHull(contour, hull);
Imgproc.drawContours(img, Collections.singletonList(convertIndexesToPoints(contour, hull)), 0, color, 2);
}
// Draw a filled in convex hull around a contour
public static void fillConvexHull(Mat img, MatOfPoint contour, Scalar color) {
MatOfInt hull = new MatOfInt();
Imgproc.convexHull(contour, hull);
Imgproc.drawContours(img, Collections.singletonList(convertIndexesToPoints(contour, hull)), 0, color, -1);
}
// Convert hull indexes into the corresponding contour points so the hull can be drawn as a contour
public static MatOfPoint convertIndexesToPoints(MatOfPoint contour, MatOfInt indexes) {
int[] arrIndex = indexes.toArray();
Point[] arrContour = contour.toArray();
Point[] arrPoints = new Point[arrIndex.length];
for (int i = 0; i < arrIndex.length; i++) {
arrPoints[i] = arrContour[arrIndex[i]];
}
MatOfPoint hull = new MatOfPoint();
hull.fromArray(arrPoints);
return hull;
}
// Get the largest contour out of a list
public static MatOfPoint getLargestContour(List<MatOfPoint> contours) {
if (contours.size() == 0) {
return null;
}
return getLargestContours(contours, 1).get(0);
}
// Get the top largest contours
public static List<MatOfPoint> getLargestContours(List<MatOfPoint> contours, int numContours) {
Collections.sort(contours, (a, b) -> (int) Imgproc.contourArea(b) - (int) Imgproc.contourArea(a));
return contours.subList(0, Math.min(numContours, contours.size()));
}
public static MatOfPoint getHighGoalContour(List<MatOfPoint> contours) {
Collections.sort(contours, (a, b) -> (int) Imgproc.contourArea(b) - (int) Imgproc.contourArea(a));
// return null if nothing
if (contours.size() == 0) {
return null;
}
// filter contours on position, aspect ratio, and area
// (rect.y is never negative, so the rect.y <= -100 check never rejects a contour;
// the loop also never visits the last, smallest contour, so at least one always remains)
double properAspect = ((double) CV_GOAL_SIDE_ASPECT_RATIO.height) / ((double) CV_GOAL_SIDE_ASPECT_RATIO.width);
for (int i = 0; i < contours.size() - 1; i++) {
MatOfPoint contour = contours.get(i);
Rect rect = Imgproc.boundingRect(contour);
double area = Imgproc.contourArea(contour);
double aspect = ((double) rect.height) / ((double) rect.width);
if (rect.y <= -100 || Math.abs(properAspect - aspect) > CV_GOAL_SIDE_ALLOWABLE_ASPECT_ERROR ||
area < CV_MIN_GOAL_AREA || area > CV_MAX_GOAL_AREA) {
contours.remove(i);
i--;
}
}
// check whether two consecutive contours with similar y position and area can be combined into one goal
int goalCounter = -1;
for (int i = 0; i < contours.size() - 1; i++) {
MatOfPoint contour1 = contours.get(i);
MatOfPoint contour2 = contours.get(i + 1);
Rect rect1 = Imgproc.boundingRect(contour1);
Rect rect2 = Imgproc.boundingRect(contour2);
double area1 = Imgproc.contourArea(contour1);
double area2 = Imgproc.contourArea(contour2);
if (Math.abs(Math.abs(rect1.y) - Math.abs(rect2.y)) < CV_GOAL_SIDE_ALLOWABLE_Y_ERROR &&
Math.abs(area1 - area2) < CV_GOAL_SIDE_ALLOWABLE_SIZE_ERROR) {
goalCounter = i;
break;
}
}
// return the results
if (goalCounter == -1) {
return contours.get(0);
} else {
MatOfPoint highGoal = new MatOfPoint();
highGoal.push_back(contours.get(goalCounter));
highGoal.push_back(contours.get(goalCounter + 1));
return highGoal;
}
}
}

View File: Detection.java

@@ -0,0 +1,136 @@
package org.firstinspires.ftc.teamcode.vision;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import static org.firstinspires.ftc.teamcode.util.Constants.GREEN;
import static org.firstinspires.ftc.teamcode.util.Constants.INVALID_AREA;
import static org.firstinspires.ftc.teamcode.util.Constants.INVALID_POINT;
import static org.firstinspires.ftc.teamcode.util.OpenCVUtil.drawConvexHull;
import static org.firstinspires.ftc.teamcode.util.OpenCVUtil.drawPoint;
import static org.firstinspires.ftc.teamcode.util.OpenCVUtil.fillConvexHull;
import static org.firstinspires.ftc.teamcode.util.OpenCVUtil.getBottomLeftOfContour;
import static org.firstinspires.ftc.teamcode.util.OpenCVUtil.getBottomRightOfContour;
import static org.firstinspires.ftc.teamcode.util.OpenCVUtil.getCenterOfContour;
// Class for a Detection
public class Detection {
private double minAreaPx;
private double maxAreaPx;
private final Size maxSizePx;
private double areaPx = INVALID_AREA;
private Point centerPx = INVALID_POINT;
private Point bottomLeftPx = INVALID_POINT;
private Point bottomRightPx = INVALID_POINT;
private MatOfPoint contour;
// Constructor
public Detection(Size frameSize, double minAreaFactor) {
this.maxSizePx = frameSize;
this.minAreaPx = frameSize.area() * minAreaFactor;
this.maxAreaPx = frameSize.area();
}
public Detection(Size frameSize, double minAreaFactor, double maxAreaFactor) {
this.maxSizePx = frameSize;
this.minAreaPx = frameSize.area() * minAreaFactor;
this.maxAreaPx = frameSize.area() * maxAreaFactor;
}
public void setMinArea(double minAreaFactor) {
this.minAreaPx = maxSizePx.area() * minAreaFactor;
}
public void setMaxArea(double maxAreaFactor) {
this.maxAreaPx = maxSizePx.area() * maxAreaFactor;
}
// Draw a convex hull around the current detection on the given image
public void draw(Mat img, Scalar color) {
if (isValid()) {
drawConvexHull(img, contour, color);
drawPoint(img, centerPx, GREEN);
// drawPoint(img, bottomLeftPx, GREEN);
// drawPoint(img, bottomRightPx, GREEN);
}
}
// Draw a filled convex hull around the current detection on the given image
public void fill(Mat img, Scalar color) {
if (isValid()) {
fillConvexHull(img, contour, color);
drawPoint(img, centerPx, GREEN);
// drawPoint(img, bottomLeftPx, GREEN);
// drawPoint(img, bottomRightPx, GREEN);
}
}
// Check if the current Detection is valid
public boolean isValid() {
// return true;
return (this.contour != null) && (this.centerPx != INVALID_POINT) && (this.areaPx != INVALID_AREA);
}
// Get the current contour
public MatOfPoint getContour() {
return contour;
}
// Set the values of the current contour
public void setContour(MatOfPoint contour) {
this.contour = contour;
double area;
if (contour != null && (area = Imgproc.contourArea(contour)) > minAreaPx && area < maxAreaPx) {
this.areaPx = area;
this.centerPx = getCenterOfContour(contour);
this.bottomLeftPx = getBottomLeftOfContour(contour);
this.bottomRightPx = getBottomRightOfContour(contour);
} else {
this.areaPx = INVALID_AREA;
this.centerPx = INVALID_POINT;
this.bottomLeftPx = INVALID_POINT;
this.bottomRightPx = INVALID_POINT;
}
}
// Get the center of the Detection, normalized so that both the width and the height of the frame span [-50, 50]
public Point getCenter() {
if (!isValid()) {
return INVALID_POINT;
}
double normalizedX = ((centerPx.x / maxSizePx.width) * 100) - 50;
double normalizedY = ((centerPx.y / maxSizePx.height) * -100) + 50;
return new Point(normalizedX, normalizedY);
}
// Get the center point in pixels
public Point getCenterPx() {
return centerPx;
}
// Get the area of the Detection, normalized so that the area of the frame is 100
public double getArea() {
if (!isValid()) {
return INVALID_AREA;
}
return (areaPx / (maxSizePx.width * maxSizePx.height)) * 100;
}
// Get the leftmost bottom corner of the detection
public Point getBottomLeftCornerPx() {
return bottomLeftPx;
}
// Get the rightmost bottom corner of the detection
public Point getBottomRightCornerPx() {
return bottomRightPx;
}
}

View File: TargetingPipeline.java

@@ -0,0 +1,82 @@
package org.firstinspires.ftc.teamcode.vision;
import static org.firstinspires.ftc.teamcode.util.Configurables.CAMERA_RED_GOAL_LOWER;
import static org.firstinspires.ftc.teamcode.util.Configurables.CAMERA_RED_GOAL_UPPER;
import static org.firstinspires.ftc.teamcode.util.Configurables.CV_MAX_GOAL_AREA;
import static org.firstinspires.ftc.teamcode.util.Configurables.CV_MIN_GOAL_AREA;
import static org.firstinspires.ftc.teamcode.util.Constants.ANCHOR;
import static org.firstinspires.ftc.teamcode.util.Constants.BLUR_SIZE;
import static org.firstinspires.ftc.teamcode.util.Constants.ERODE_DILATE_ITERATIONS;
import static org.firstinspires.ftc.teamcode.util.Constants.RED;
import static org.firstinspires.ftc.teamcode.util.Constants.STRUCTURING_ELEMENT;
import static org.firstinspires.ftc.teamcode.util.OpenCVUtil.getLargestContour;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;
import org.openftc.easyopencv.OpenCvPipeline;
import java.util.ArrayList;
public class TargetingPipeline extends OpenCvPipeline {
Mat blurred = new Mat();
Mat hsv = new Mat();
Mat redMask1 = new Mat();
Mat redMask2 = new Mat();
Mat redMask = new Mat();
Mat whiteMask = new Mat();
Scalar redGoalLower1;
Scalar redGoalUpper1;
Scalar redGoalLower2;
Scalar redGoalUpper2;
private Detection red;
// Init
@Override
public void init(Mat input) {
red = new Detection(input.size(), CV_MIN_GOAL_AREA);
}
// Process each frame that is received from the webcam
@Override
public Mat processFrame(Mat input) {
Imgproc.GaussianBlur(input, blurred, BLUR_SIZE, 0);
Imgproc.cvtColor(blurred, hsv, Imgproc.COLOR_RGB2HSV);
updateRed(input);
return input;
}
// Update the Red Goal Detection
private void updateRed(Mat input) {
// take pixels that are in the color range and put them into a mask, eroding and dilating them to remove white noise
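// red straddles the hue wrap-around (180 -> 0 in OpenCV's HSV), so threshold two hue ranges,
// [lower.h, 180] and [0, upper.h], and combine the resulting masks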
redGoalLower1 = new Scalar(CAMERA_RED_GOAL_LOWER.getH(), CAMERA_RED_GOAL_LOWER.getS(), CAMERA_RED_GOAL_LOWER.getV());
redGoalUpper1 = new Scalar(180, CAMERA_RED_GOAL_UPPER.getS(), CAMERA_RED_GOAL_UPPER.getV());
redGoalLower2 = new Scalar(0, CAMERA_RED_GOAL_LOWER.getS(), CAMERA_RED_GOAL_LOWER.getV());
redGoalUpper2 = new Scalar(CAMERA_RED_GOAL_UPPER.getH(), CAMERA_RED_GOAL_UPPER.getS(), CAMERA_RED_GOAL_UPPER.getV());
Core.inRange(hsv, redGoalLower1, redGoalUpper1, redMask1);
Core.inRange(hsv, redGoalLower2, redGoalUpper2, redMask2);
Core.add(redMask1, redMask2, redMask);
Imgproc.erode(redMask, redMask, STRUCTURING_ELEMENT, ANCHOR, ERODE_DILATE_ITERATIONS);
Imgproc.dilate(redMask, redMask, STRUCTURING_ELEMENT, ANCHOR, ERODE_DILATE_ITERATIONS);
ArrayList<MatOfPoint> contours = new ArrayList<>();
Imgproc.findContours(redMask, contours, new Mat(), Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);
for (int i = 0; i < contours.size(); i++) {
Detection newDetection = new Detection(input.size(), CV_MIN_GOAL_AREA, CV_MAX_GOAL_AREA);
newDetection.setContour(contours.get(i));
newDetection.draw(input, RED);
}
red.setContour(getLargestContour(contours));
red.fill(input, RED);
}
public Detection getRed() {
return this.red;
}
}