commit f8fcafbea2
parent fb5c3174f1

    Add some documentation
Alliance.java

@@ -1,4 +1,7 @@
 package com.tearabite.ftctearabits.common;
 
+/**
+ * Enum for the two alliances in FTC
+ */
 public enum Alliance { Blue, Red }
 
LinePaint.java

@@ -3,6 +3,9 @@ package com.tearabite.ftctearabits.graphics;
 import android.graphics.Color;
 import android.graphics.Paint;
 
+/**
+ * A class to represent a paint object for drawing lines
+ */
 public class LinePaint extends Paint
 {
     public LinePaint(int color)
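For context, LinePaint wraps android.graphics.Paint so a caller only has to supply a color. A minimal usage sketch, not part of the commit (the Canvas, the coordinates, and the assumption that the constructor also configures a stroke style suited to lines are illustrative):

// Minimal sketch: drawing with a LinePaint on an Android Canvas.
// Assumes LinePaint's constructor fully configures the paint; only the color argument is documented here.
import android.graphics.Canvas;
import android.graphics.Color;

import com.tearabite.ftctearabits.graphics.LinePaint;

public class LinePaintExample {
    // The Canvas would normally come from whatever drawing callback is in use.
    public static void drawDiagonal(Canvas canvas) {
        LinePaint red = new LinePaint(Color.RED);
        canvas.drawLine(0f, 0f, 100f, 100f, red); // arbitrary illustrative coordinates
    }
}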
BasicColorDetectionVisionProcessor.java

@@ -22,21 +22,38 @@ import java.util.ArrayList;
 import lombok.Getter;
 import lombok.Setter;
 
+/**
+ * A basic color detection vision processor that detects the largest contour of a specified color
+ */
 public class BasicColorDetectionVisionProcessor implements VisionProcessor {
-    public static final Size BLUR_SIZE = new Size(7, 7);
-    public static final int ERODE_DILATE_ITERATIONS = 2;
-    public static final Mat STRUCTURING_ELEMENT = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(5, 5));
-    public static final Point ANCHOR = new Point((STRUCTURING_ELEMENT.cols() / 2f), STRUCTURING_ELEMENT.rows() / 2f);
+    private static final Mat STRUCTURING_ELEMENT = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(5, 5));
+    private static final Point ANCHOR = new Point((STRUCTURING_ELEMENT.cols() / 2f), STRUCTURING_ELEMENT.rows() / 2f);
+
+    /**
+     * The size of the blur kernel
+     */
+    @Getter @Setter private Size blurSize = new Size(7, 7);
+
+    /**
+     * The color ranges to detect
+     */
+    @Getter @Setter private ScalarRange[] colorRanges;
+
+    /**
+     * The detection object
+     */
+    @Getter private Detection detection;
+
+    /**
+     * The number of iterations to erode and dilate the mask
+     */
+    @Getter @Setter private int erodeDilateIterations = 2;
 
     private final Mat blurred = new Mat();
     private final Mat hsv = new Mat();
     private final Mat mask = new Mat();
     private final Mat tmpMask = new Mat();
-
-    @Getter @Setter private ScalarRange[] colorRanges;
-    @Getter private Detection detection;
-
 
     public BasicColorDetectionVisionProcessor(ScalarRange... colorRanges) {
         this.colorRanges = colorRanges;
     }
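The substantive change in the hunk above is that the old BLUR_SIZE and ERODE_DILATE_ITERATIONS constants become per-instance blurSize and erodeDilateIterations properties with Lombok-generated accessors, so each processor can be tuned independently. A hedged sketch of doing so (package locations, the ScalarRange constructor argument order, and the specific values are assumptions for illustration):

// Sketch: tuning the newly settable properties on a processor instance.
// Accessor names follow Lombok's standard @Getter/@Setter naming.
import org.opencv.core.Scalar;
import org.opencv.core.Size;

import com.tearabite.ftctearabits.vision.BasicColorDetectionVisionProcessor;
import com.tearabite.ftctearabits.vision.ScalarRange;

public class ProcessorTuningExample {
    public static BasicColorDetectionVisionProcessor buildYellowProcessor() {
        // Illustrative HSV bounds for yellow; argument order (lower, upper) is assumed.
        ScalarRange yellow = new ScalarRange(new Scalar(20, 100, 100), new Scalar(30, 255, 255));
        BasicColorDetectionVisionProcessor processor = new BasicColorDetectionVisionProcessor(yellow);
        processor.setBlurSize(new Size(9, 9));   // larger kernel smooths the mask at some latency cost
        processor.setErodeDilateIterations(3);   // more iterations remove more speckle noise
        return processor;
    }
}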
@@ -62,7 +79,7 @@ public class BasicColorDetectionVisionProcessor implements VisionProcessor {
 
     @Override
     public Object processFrame(Mat input, long captureTimeNanos) {
-        Imgproc.GaussianBlur(input, blurred, BLUR_SIZE, 0);
+        Imgproc.GaussianBlur(input, blurred, blurSize, 0);
         Imgproc.cvtColor(blurred, hsv, Imgproc.COLOR_RGB2HSV);
 
         mask.release();
@@ -74,8 +91,8 @@ public class BasicColorDetectionVisionProcessor implements VisionProcessor {
             Core.add(mask, tmpMask, mask);
         }
 
-        Imgproc.erode(mask, mask, STRUCTURING_ELEMENT, ANCHOR, ERODE_DILATE_ITERATIONS);
-        Imgproc.dilate(mask, mask, STRUCTURING_ELEMENT, ANCHOR, ERODE_DILATE_ITERATIONS);
+        Imgproc.erode(mask, mask, STRUCTURING_ELEMENT, ANCHOR, erodeDilateIterations);
+        Imgproc.dilate(mask, mask, STRUCTURING_ELEMENT, ANCHOR, erodeDilateIterations);
 
         ArrayList<MatOfPoint> contours = new ArrayList<>();
         Imgproc.findContours(mask, contours, new Mat(), Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);
@@ -100,10 +117,10 @@ public class BasicColorDetectionVisionProcessor implements VisionProcessor {
     }
 
     /**
-     * @param ignoreSmallerThan the minimum area threshold in pixels
+     * @param minimumAreaThreshold the minimum area threshold in pixels
      */
-    public void setMinimumAreaThreshold(double ignoreSmallerThan) {
-        this.detection.setMinimumAreaThreshold(ignoreSmallerThan);
+    public void setMinimumAreaThreshold(double minimumAreaThreshold) {
+        this.detection.setMinimumAreaThreshold(minimumAreaThreshold);
     }
 
     /**
@@ -114,9 +131,9 @@ public class BasicColorDetectionVisionProcessor implements VisionProcessor {
     }
 
     /**
-     * @param ignoreLargerThan the maximum area threshold in pixels
+     * @param maximumAreaThreshold the maximum area threshold in pixels
      */
-    public void setMaximumAreaThreshold(double ignoreLargerThan) {
-        this.detection.setMaximumAreaThreshold(ignoreLargerThan);
+    public void setMaximumAreaThreshold(double maximumAreaThreshold) {
+        this.detection.setMaximumAreaThreshold(maximumAreaThreshold);
     }
 }
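The renamed threshold setters above simply forward to the underlying Detection, and the largest matching contour is exposed through getDetection(). A usage sketch under the same assumptions as the earlier one (the threshold values are examples only):

// Sketch: filtering detections by area and reading back the result.
import org.opencv.core.Point;

import com.tearabite.ftctearabits.vision.BasicColorDetectionVisionProcessor;
import com.tearabite.ftctearabits.vision.Detection;

public class DetectionReadoutExample {
    public static void report(BasicColorDetectionVisionProcessor processor) {
        processor.setMinimumAreaThreshold(50);     // ignore contours smaller than 50 px
        processor.setMaximumAreaThreshold(50_000); // ignore contours larger than 50,000 px

        Detection detection = processor.getDetection();
        if (detection != null && detection.isValid()) {
            Point center = detection.getCenter(); // pixel coordinates of the detection's center
            double area = detection.getArea();    // pixel area of the detection
            System.out.printf("center=(%.1f, %.1f) area=%.0f px%n", center.x, center.y, area);
        }
    }
}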
Detection.java

@@ -13,6 +13,10 @@ import lombok.Getter;
 import lombok.NoArgsConstructor;
 import lombok.Setter;
 
+/**
+ * This class is used to represent a detection from a VisionProcessor pipeline.
+ * It contains various useful abstraction methods for interacting with the detection.
+ */
 @NoArgsConstructor()
 @AllArgsConstructor
 @Builder
@@ -25,6 +29,11 @@ public class Detection {
     @Getter @Setter private double maxAreaThreshold;
     @Getter @Setter private double minAreaThreshold;
 
+    /**
+     * Returns whether the detection is valid or not.
+     * A detection is considered valid if it has a contour and its area is within
+     * the min and max area thresholds.
+     */
     public boolean isValid() {
         double area = getArea();
         return contour != null
@@ -32,6 +41,11 @@ public class Detection {
                 && area < maxAreaThreshold;
     }
 
+    /**
+     * Returns the area of the detection in the specified scale.
+     * @param scale The scale to return the area in
+     * @return The area of the detection
+     */
     public double getArea(PropertyScale scale) {
         if (!isValid()) {
             return INVALID_AREA;
@@ -45,10 +59,19 @@ public class Detection {
         return (areaPx / (frameSize.width * frameSize.height)) * 100;
     }
 
+    /**
+     * Returns the area of the detection in pixels.
+     * @return The pixel area of the detection
+     */
     public double getArea() {
         return getArea(PropertyScale.Pixels);
     }
 
+    /**
+     * Returns the center of the detection in the specified scale.
+     * @param scale The scale to return the center in
+     * @return The center of the detection
+     */
     public Point getCenter(PropertyScale scale) {
         if (!isValid()) {
             return INVALID_POINT;
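To make the Percent branch of getArea concrete: the percentage is the contour's pixel area divided by the total frame area, times 100. A small worked example (the frame size and contour area are illustrative):

// Worked example of the Percent conversion used by getArea(PropertyScale.Percent).
public class AreaPercentExample {
    public static void main(String[] args) {
        double frameWidth = 640, frameHeight = 480; // illustrative frame size
        double areaPx = 15_360;                     // illustrative contour area in pixels
        double areaPercent = (areaPx / (frameWidth * frameHeight)) * 100;
        System.out.println(areaPercent);            // 5.0, i.e. the contour covers 5% of the frame
    }
}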
@@ -62,40 +85,67 @@ public class Detection {
         return pixelPointToPercentageOfFrame(centerPx);
     }
 
+    /**
+     * Returns the pixel center of the detection.
+     * @return The pixel center of the detection
+     */
     public Point getCenter() {
         return getCenter(PropertyScale.Pixels);
     }
 
-    public void setMaximumAreaThreshold(double ignoreLargerThan, PropertyScale scale) {
+    /**
+     * Sets the maximum area threshold for the detection.
+     * @param maximumAreaThreshold The maximum area threshold
+     * @param scale The scale that maximumAreaThreshold is specified in
+     */
+    public void setMaximumAreaThreshold(double maximumAreaThreshold, PropertyScale scale) {
         switch (scale) {
             case Pixels:
-                this.maxAreaThreshold = ignoreLargerThan;
+                this.maxAreaThreshold = maximumAreaThreshold;
                 break;
             case Percent:
-                this.maxAreaThreshold = frameSize.area() * ignoreLargerThan;
+                this.maxAreaThreshold = frameSize.area() * maximumAreaThreshold;
                 break;
         }
     }
 
-    public void setMaximumAreaThreshold(double threshold) {
-        setMaximumAreaThreshold(threshold, PropertyScale.Pixels);
+    /**
+     * Sets the maximum area threshold for the detection in pixels.
+     * @param maximumAreaThreshold The maximum area threshold
+     */
+    public void setMaximumAreaThreshold(double maximumAreaThreshold) {
+        setMaximumAreaThreshold(maximumAreaThreshold, PropertyScale.Pixels);
     }
 
-    public void setMinimumAreaThreshold(double threshold, PropertyScale scale) {
+    /**
+     * Sets the minimum area threshold for the detection.
+     * @param minimumAreaThreshold The minimum area threshold
+     * @param scale The scale that minimumAreaThreshold is specified in
+     */
+    public void setMinimumAreaThreshold(double minimumAreaThreshold, PropertyScale scale) {
         switch (scale) {
             case Pixels:
-                this.minAreaThreshold = threshold;
+                this.minAreaThreshold = minimumAreaThreshold;
                 break;
             case Percent:
-                this.minAreaThreshold = frameSize.area() * threshold;
+                this.minAreaThreshold = frameSize.area() * minimumAreaThreshold;
                 break;
         }
     }
 
-    public void setMinimumAreaThreshold(double threshold) {
-        setMinimumAreaThreshold(threshold, PropertyScale.Pixels);
+    /**
+     * Sets the minimum area threshold for the detection in pixels.
+     * @param minimumAreaThreshold The minimum area threshold
+     */
+    public void setMinimumAreaThreshold(double minimumAreaThreshold) {
+        setMinimumAreaThreshold(minimumAreaThreshold, PropertyScale.Pixels);
     }
 
+    /**
+     * Returns the maximum area threshold for the detection in the specified scale.
+     * @param scale The scale to return the maximum area threshold in
+     * @return The maximum area threshold
+     */
     public double getMaximumAreaThreshold(PropertyScale scale) {
         switch (scale) {
             default:
@@ -106,10 +156,19 @@ public class Detection {
         }
     }
 
+    /**
+     * Returns the maximum area threshold for the detection in pixels.
+     * @return The maximum area threshold
+     */
     public double getMaximumAreaThreshold() {
         return getMaximumAreaThreshold(PropertyScale.Pixels);
     }
 
+    /**
+     * Returns the minimum area threshold for the detection in the specified scale.
+     * @param scale The scale to return the minimum area threshold in
+     * @return The minimum area threshold
+     */
     public double getMinimumAreaThreshold(PropertyScale scale) {
         switch (scale) {
             default:
@@ -120,12 +179,21 @@
         }
     }
 
+    /**
+     * Returns the minimum area threshold for the detection in pixels.
+     * @return The minimum area threshold
+     */
    public double getMinimumAreaThreshold() {
        return getMinimumAreaThreshold(PropertyScale.Pixels);
    }
 
    public enum PropertyScale { Pixels, Percent }
 
+    /**
+     * Converts a pixel point to a percentage of the frame.
+     * @param pixelPoint The pixel point to convert
+     * @return The percentage of the frame that the pixel point is at
+     */
    private Point pixelPointToPercentageOfFrame(Point pixelPoint) {
        double normalizedX = ((pixelPoint.x / frameSize.width) * 100) - 50;
        double normalizedY = ((pixelPoint.y / frameSize.height) * -100) + 50;
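Two behaviors in these hunks are worth spelling out. The threshold setters accept either scale, and the Percent branch multiplies the raw value by frameSize.area(), so a threshold of 2% of the frame is passed as 0.02, even though getArea(PropertyScale.Percent) reports on a 0-100 scale. And pixelPointToPercentageOfFrame maps pixel coordinates into a frame-centered range of roughly -50 to +50 on each axis, with (0, 0) at the middle of the frame and positive y pointing up. A hedged sketch (the package location is assumed, the values are illustrative):

// Sketch: configuring Detection thresholds in either scale and reading a centered Percent position.
// Assumes the Detection has already been given a frame size by the vision pipeline.
import org.opencv.core.Point;

import com.tearabite.ftctearabits.vision.Detection;

public class DetectionScaleExample {
    public static void configure(Detection detection) {
        // "At least 2% of the frame", expressed in the Percent scale as a fraction of frameSize.area():
        detection.setMinimumAreaThreshold(0.02, Detection.PropertyScale.Percent);
        // Equivalent pixel form for a 640x480 frame: 0.02 * 640 * 480 = 6144 px.
        // detection.setMinimumAreaThreshold(6144);

        // Percent-scale centers are frame-centered: x and y each run roughly -50..50, (0, 0) is the middle.
        Point centered = detection.getCenter(Detection.PropertyScale.Percent);
        System.out.println(centered);
    }
}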
FTCColors.java

@@ -1,6 +1,10 @@
 package com.tearabite.ftctearabits.vision;
 
 import org.opencv.core.Scalar;
+
+/**
+ * A class containing common HSV color ranges for the FIRST Tech Challenge
+ */
 public class FTCColors {
     public static Scalar FTC_RED_LOWER = new Scalar(165, 80, 80);
     public static Scalar FTC_RED_UPPER = new Scalar(15, 255, 255);
OpenCVUtil.java

@@ -12,6 +12,9 @@ import org.opencv.imgproc.Moments;
 import java.util.Collections;
 import java.util.List;
 
+/**
+ * A utility class for common vision operations
+ */
 public class OpenCVUtil {
 
     public static void drawPoint(Mat img, Point point, Scalar color) {
ScalarRange.java

@@ -5,6 +5,9 @@ import org.opencv.core.Scalar;
 import lombok.AllArgsConstructor;
 import lombok.Data;
 
+/**
+ * A class for specifying an upper and lower bound for a color range.
+ */
 @Data
 @AllArgsConstructor
 public class ScalarRange {
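Pulling the documented pieces together: FTCColors supplies the HSV bounds, ScalarRange pairs a lower and upper bound, and BasicColorDetectionVisionProcessor consumes the ranges. Note that FTC_RED_LOWER has a higher hue (165) than FTC_RED_UPPER (15) because red wraps around the top of OpenCV's hue scale; how the library handles that wrap-around is not visible in this diff. A final sketch under the assumptions noted earlier (constructor argument order and package locations are assumed):

// Sketch: building a red detector from the documented classes and marking its center with OpenCVUtil.
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Scalar;

import com.tearabite.ftctearabits.vision.BasicColorDetectionVisionProcessor;
import com.tearabite.ftctearabits.vision.Detection;
import com.tearabite.ftctearabits.vision.FTCColors;
import com.tearabite.ftctearabits.vision.OpenCVUtil;
import com.tearabite.ftctearabits.vision.ScalarRange;

public class RedDetectorExample {
    // Build a processor configured for FTC red; in an OpMode it would be registered with the
    // SDK's vision pipeline, which drives its init/processFrame callbacks.
    public static BasicColorDetectionVisionProcessor createRedProcessor() {
        ScalarRange red = new ScalarRange(FTCColors.FTC_RED_LOWER, FTCColors.FTC_RED_UPPER);
        return new BasicColorDetectionVisionProcessor(red);
    }

    // Once frames are being processed, mark the latest detection's center on a frame.
    public static void annotate(BasicColorDetectionVisionProcessor processor, Mat frame) {
        Detection detection = processor.getDetection();
        if (detection != null && detection.isValid()) {
            Point center = detection.getCenter();
            OpenCVUtil.drawPoint(frame, center, new Scalar(0, 255, 0)); // green marker
        }
    }
}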