sihan 2024-03-16 16:42:18 -05:00
parent 277d9d55cc
commit 10a35ed79d
21 changed files with 313 additions and 45 deletions

View File

@@ -12,6 +12,7 @@ import static org.firstinspires.ftc.teamcode.util.Constants.SLIDERIGHT;
import static org.firstinspires.ftc.teamcode.util.Constants.WRIST;
import com.acmerobotics.dashboard.config.Config;
import com.acmerobotics.roadrunner.geometry.Pose2d;
import com.arcrobotics.ftclib.controller.PController;
import com.arcrobotics.ftclib.controller.PIDFController;
import com.arcrobotics.ftclib.gamepad.GamepadEx;
@@ -130,9 +131,14 @@ public class Robot {
}
public void update() {
Pose2d estimatedPose = null;
if (camera != null) {
estimatedPose = this.camera.estimatePoseFromAprilTag();
}
this.arm.update();
this.wrist.update();
this.drive.update();
this.drive.update(estimatedPose);
}
public enum pickupMacroStates {

View File

@@ -213,7 +213,15 @@ public class MecanumDrive extends com.acmerobotics.roadrunner.drive.MecanumDrive
}
public void update() {
updatePoseEstimate();
this.update(null);
}
public void update(Pose2d guess) {
if (guess == null) {
updatePoseEstimate();
} else {
setPoseEstimate(guess);
}
DriveSignal signal = trajectorySequenceRunner.update(getPoseEstimate(), getPoseVelocity());
if (signal != null) setDriveSignal(signal);
}
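Taken together, Robot.update() now feeds the drive a vision pose whenever a tag is visible and lets odometry carry on otherwise. Below is a minimal sketch of the intended call pattern, mirroring the async-follow loops in the autonomous routines later in this commit; TrajectorySequence, TrajectorySequenceBuilder, and opModeIsActive() come from the Road Runner quickstart and LinearOpMode and are assumptions, not part of this diff:

TrajectorySequenceBuilder builder = robot.getTrajectorySequenceBuilder();
// ... add path segments and temporal markers here ...
robot.getDrive().followTrajectorySequenceAsync(builder.build());
while (opModeIsActive() && robot.getDrive().isBusy()) {
    // Robot.update() asks the camera for an AprilTag pose estimate; a non-null result
    // replaces the odometry pose via setPoseEstimate(), null falls back to
    // updatePoseEstimate(), then the trajectory follower is stepped once.
    robot.update();
}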

View File

@@ -24,7 +24,6 @@ public abstract class AutoBase extends LinearOpMode {
public void runOpMode() {
this.telemetry = new MultipleTelemetry(this.telemetry, FtcDashboard.getInstance().getTelemetry());
this.robot = new Robot().init(hardwareMap);
this.robot.getCamera().initTargetingCamera();
this.initialPosition = new Pose2d(-34, -59.5, Math.toRadians(270));
this.robot.getDrive().setPoseEstimate(initialPosition);

View File

@@ -69,7 +69,7 @@ public class AutoBlue extends LinearOpMode {
this.telemetry = new MultipleTelemetry(this.telemetry, FtcDashboard.getInstance().getTelemetry());
this.robot = new Robot().init(hardwareMap);
this.robot.getCamera().initTargetingCamera();
// this.robot.getCamera().initTargetingCamera();
this.initialPosition = new Pose2d(-34, -59.5, Math.toRadians(270));
this.robot.getDrive().setPoseEstimate(initialPosition);

View File

@@ -126,7 +126,7 @@ public class AutoBlueFarStem extends LinearOpMode {
this.telemetry = new MultipleTelemetry(this.telemetry, FtcDashboard.getInstance().getTelemetry());
this.robot = new Robot().init(hardwareMap);
this.robot.getCamera().initTargetingCamera();
// this.robot.getCamera().initTargetingCamera();
this.initialPosition = new Pose2d(34, -59.5, Math.toRadians(270));
this.robot.getDrive().setPoseEstimate(initialPosition);

View File

@@ -159,7 +159,7 @@ public class AutoBlueTwoPlusTwo extends LinearOpMode {
this.telemetry = new MultipleTelemetry(this.telemetry, FtcDashboard.getInstance().getTelemetry());
this.robot = new Robot().init(hardwareMap);
this.robot.getCamera().initTargetingCamera();
// this.robot.getCamera().initTargetingCamera();
this.initialPosition = new Pose2d(-34, -59.5, Math.toRadians(270));
this.robot.getDrive().setPoseEstimate(initialPosition);

View File

@@ -20,18 +20,18 @@ public class AutoRed extends LinearOpMode {
//Preloads
final static Pose2d LEFT_PRELOAD_ONE = new Pose2d(40, -37.5, Math.toRadians(270));
final static Pose2d LEFT_PRELOAD_TWO = new Pose2d(29.5, -32, Math.toRadians(360));
final static Pose2d CENTER_PRELOAD = new Pose2d(33, -28, Math.toRadians(270));
final static Pose2d CENTER_PRELOAD = new Pose2d(33, -24, Math.toRadians(270));
final static Pose2d RIGHT_PRELOAD = new Pose2d(45, -35, Math.toRadians(270));
//Board Scores
final static Pose2d LEFT_BOARD = new Pose2d(75.8, -26.5, Math.toRadians(358));
final static Pose2d CENTER_BOARD = new Pose2d(75.8, -36.3, Math.toRadians(358));
final static Pose2d CENTER_BOARD = new Pose2d(80, -30.3, Math.toRadians(358));
final static Pose2d RIGHT_BOARD = new Pose2d(75.8, -40, Math.toRadians(358));
//Park
final static Pose2d PARK = new Pose2d(60,-58,Math.toRadians(360));
final static Pose2d PARK = new Pose2d(60, -58, Math.toRadians(360));
final static Pose2d PARK2 = new Pose2d(80, -60, Math.toRadians(360));
final static Pose2d PARKLEFT = new Pose2d(60,-15,Math.toRadians(360));
final static Pose2d PARKLEFT2 = new Pose2d(80, -10, Math.toRadians(360));
final static Pose2d PARKLEFT = new Pose2d(60, -6, Math.toRadians(360));
final static Pose2d PARKLEFT2 = new Pose2d(80, -6, Math.toRadians(360));
protected void scorePreloadOne() {
TrajectorySequenceBuilder builder = this.robot.getTrajectorySequenceBuilder();
@@ -66,7 +66,7 @@ public class AutoRed extends LinearOpMode {
builder.addTemporalMarker(.2, robot.getArm()::armScore);
builder.addTemporalMarker(.2, robot.getSlides()::slideFirstLayer);
builder.addTemporalMarker(.2, robot.getWrist()::wristScore);
this.robot.getDrive().followTrajectorySequence(builder.build());
this.robot.getDrive().followTrajectorySequenceAsync(builder.build());
while (this.robot.getDrive().isBusy()) {
robot.update();
}
@@ -87,7 +87,7 @@ public class AutoRed extends LinearOpMode {
builder.addTemporalMarker(.1, robot.getArm()::armRest);
builder.addTemporalMarker(.1, robot.getWrist()::wristPickup);
builder.addTemporalMarker(.1, robot.getSlides()::slideDown);
this.robot.getDrive().followTrajectorySequence(builder.build());
this.robot.getDrive().followTrajectorySequenceAsync(builder.build());
while (this.robot.getDrive().isBusy()) {
robot.update();
}
@@ -105,7 +105,7 @@ public class AutoRed extends LinearOpMode {
this.telemetry = new MultipleTelemetry(this.telemetry, FtcDashboard.getInstance().getTelemetry());
this.robot = new Robot().init(hardwareMap);
this.robot.getCamera().initTargetingCamera();
// this.robot.getCamera().initTargetingCamera();
this.initialPosition = new Pose2d(34, -59.5, Math.toRadians(270));
this.robot.getDrive().setPoseEstimate(initialPosition);

View File

@@ -142,7 +142,7 @@ public class AutoRedFar extends LinearOpMode {
public void runOpMode() throws InterruptedException {
this.telemetry = new MultipleTelemetry(this.telemetry, FtcDashboard.getInstance().getTelemetry());
this.robot = new Robot().init(hardwareMap);
this.robot.getCamera().initTargetingCamera();
// this.robot.getCamera().initTargetingCamera();
this.initialPosition = new Pose2d(-34, -59.5, Math.toRadians(270));
this.robot.getDrive().setPoseEstimate(initialPosition);

View File

@@ -141,7 +141,7 @@ public class AutoRedFaronepluszero extends LinearOpMode {
public void runOpMode() throws InterruptedException {
this.telemetry = new MultipleTelemetry(this.telemetry, FtcDashboard.getInstance().getTelemetry());
this.robot = new Robot().init(hardwareMap);
this.robot.getCamera().initTargetingCamera();
// this.robot.getCamera().initTargetingCamera();
this.initialPosition = new Pose2d(-34, -59.5, Math.toRadians(270));
this.robot.getDrive().setPoseEstimate(initialPosition);

View File

@@ -12,7 +12,6 @@ import org.firstinspires.ftc.teamcode.hardware.roadrunner.drive.MecanumDrive;
import org.firstinspires.ftc.teamcode.hardware.roadrunner.trajectorysequence.TrajectorySequenceBuilder;
@Autonomous(name = "autoRed2+2")
public class AutoRedTwoPlusTwo extends LinearOpMode {
protected Pose2d initialPosition;
private Robot robot;
@@ -161,7 +160,6 @@ public class AutoRedTwoPlusTwo extends LinearOpMode {
this.telemetry = new MultipleTelemetry(this.telemetry, FtcDashboard.getInstance().getTelemetry());
this.robot = new Robot().init(hardwareMap);
this.robot.getCamera().initTargetingCamera();
this.initialPosition = new Pose2d(34, -59.5, Math.toRadians(270));
this.robot.getDrive().setPoseEstimate(initialPosition);

View File

@@ -223,7 +223,6 @@ public class RedFarTwoPlusTwoTest extends LinearOpMode {
public void runOpMode() throws InterruptedException {
this.telemetry = new MultipleTelemetry(this.telemetry, FtcDashboard.getInstance().getTelemetry());
this.robot = new Robot().init(hardwareMap);
this.robot.getCamera().initTargetingCamera();
this.initialPosition = new Pose2d(-34, -59.5, Math.toRadians(270));
this.robot.getDrive().setPoseEstimate(initialPosition);

View File

@@ -0,0 +1,7 @@
package org.firstinspires.ftc.teamcode.util;
public class CenterStageCommon {
public enum Alliance {Blue, Red}
public enum PropLocation {Unknown, Left, Center, Right}
}

View File

@@ -0,0 +1,26 @@
package org.firstinspires.ftc.teamcode.util;
import android.graphics.Color;
import com.tearabite.ielib.vision.ScalarRange;
import org.opencv.core.Scalar;
public class Colors {
// CV Color Threshold Constants
public static Scalar FTC_RED_LOWER = new Scalar(165, 80, 80);
public static Scalar FTC_RED_UPPER = new Scalar(15, 255, 255);
public static ScalarRange FTC_RED_RANGE_1 = new ScalarRange(new Scalar(180, FTC_RED_UPPER.val[1], FTC_RED_UPPER.val[2]), FTC_RED_LOWER);
public static ScalarRange FTC_RED_RANGE_2 = new ScalarRange(FTC_RED_UPPER, new Scalar(0, FTC_RED_LOWER.val[1], FTC_RED_LOWER.val[2]));
public static Scalar FTC_BLUE_LOWER = new Scalar(75, 40, 80);
public static Scalar FTC_BLUE_UPPER = new Scalar(120, 255, 255);
public static ScalarRange FTC_BLUE_RANGE = new ScalarRange(FTC_BLUE_UPPER, FTC_BLUE_LOWER);
public static Scalar FTC_WHITE_LOWER = new Scalar(0, 0, 40);
public static Scalar FTC_WHITE_UPPER = new Scalar(180, 30, 255);
public static OpenCVUtil.LinePaint RED = new OpenCVUtil.LinePaint(Color.RED);
public static OpenCVUtil.LinePaint BLUE = new OpenCVUtil.LinePaint(Color.BLUE);
public static OpenCVUtil.LinePaint BLACK = new OpenCVUtil.LinePaint(Color.BLACK);
public static OpenCVUtil.LinePaint WHITE = new OpenCVUtil.LinePaint(Color.WHITE);
}
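Red in OpenCV's HSV space wraps around the hue boundary (180 back to 0), which is why it gets two ranges here while blue gets one. The following is a hedged sketch of how the two bands are meant to be OR-ed into a single binary mask, using only the getLower()/getUpper() accessors that appear elsewhere in this commit; RedMaskSketch is a hypothetical helper, and the actual union happens in PropDetectionPipeline.detect() later in the diff:

import static org.firstinspires.ftc.teamcode.util.Colors.FTC_RED_RANGE_1;
import static org.firstinspires.ftc.teamcode.util.Colors.FTC_RED_RANGE_2;

import org.opencv.core.Core;
import org.opencv.core.Mat;

public class RedMaskSketch {
    // 'hsv' must already be converted with Imgproc.COLOR_RGB2HSV
    public static Mat redMask(Mat hsv) {
        Mat highBand = new Mat(); // hue 165..180
        Mat lowBand = new Mat();  // hue 0..15
        Core.inRange(hsv, FTC_RED_RANGE_1.getLower(), FTC_RED_RANGE_1.getUpper(), highBand);
        Core.inRange(hsv, FTC_RED_RANGE_2.getLower(), FTC_RED_RANGE_2.getUpper(), lowBand);
        Mat mask = new Mat();
        Core.bitwise_or(highBand, lowBand, mask); // union of both hue bands
        return mask;
    }
}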

View File

@@ -23,6 +23,18 @@ public class Configurables {
public static double SLOWMODE_TURN = 0.3;
}
//Camera Stuff
@Config
public static class camerStuff {
public static double CAMERA_OFFSET_X = 0f;
public static double DETECTION_AREA_MIN = 0.02f;
public static double DETECTION_AREA_MAX = 0.3f;
public static double DETECTION_LEFT_X = 0;
public static double DETECTION_CENTER_X = .5;
public static double DETECTION_RIGHT_X = 1;
public static double SCORING_DISTANCE_FROM_APRIL_TAG = 6f;
}
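These values are consumed by the vision code added in this commit: the DETECTION_* constants appear to be normalized 0..1 positions and area fractions for a prop detection, and SCORING_DISTANCE_FROM_APRIL_TAG looks like a stand-off distance in inches. A hedged sketch of one way the stand-off could be used, assuming the FTC SDK's AprilTagDetection.ftcPose and the quickstart TrajectorySequenceBuilder.forward() (neither call appears in this diff) and assuming the camera is pointed squarely at the tag:

AprilTagDetection tag = robot.getCamera().getBestAprilTagDetection();
if (tag != null && tag.ftcPose != null) {
    // remaining distance to the stand-off point, in inches
    double forward = tag.ftcPose.range - SCORING_DISTANCE_FROM_APRIL_TAG;
    TrajectorySequenceBuilder builder = robot.getTrajectorySequenceBuilder();
    builder.forward(forward); // negative would mean the robot is already too close
    robot.getDrive().followTrajectorySequenceAsync(builder.build());
}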
//Auto Temp
@Config
public static class AuToDeV {

View File

@@ -52,5 +52,6 @@ public class Constants {
public static final String HANGRIGHT = "hangright";
public static final String HANGLEFT = "hangleft";
public static final String LIGHTS = "lights";
public static final String TAG_CAMERA = "tagCamera";
}

View File

@@ -1,5 +1,7 @@
package org.firstinspires.ftc.teamcode.util;
import android.graphics.Paint;
import org.opencv.core.Mat;
import org.opencv.core.MatOfInt;
import org.opencv.core.MatOfPoint;
@@ -87,4 +89,9 @@ public class OpenCVUtil {
Collections.sort(contours, (a, b) -> (int) Imgproc.contourArea(b) - (int) Imgproc.contourArea(a));
return contours.subList(0, Math.min(numContours, contours.size()));
}
public static class LinePaint extends Paint {
public LinePaint(int color) {
// A default Paint draws in black; apply the ARGB color passed by Colors (Color.RED, Color.BLUE, ...).
setColor(color);
}
}
}

View File

@@ -0,0 +1,21 @@
package org.firstinspires.ftc.teamcode.util;
import org.opencv.core.Scalar;
public class ScalarRange {
private Scalar upper;
private Scalar lower;
public ScalarRange(Scalar upper, Scalar lower) {
this.upper = upper;
this.lower = lower;
}
public Scalar getUpper() {
return this.upper;
}
public Scalar getLower() {
return this.lower;
}
}

View File

@@ -1,47 +1,69 @@
package org.firstinspires.ftc.teamcode.vision;
import static org.firstinspires.ftc.teamcode.util.Constants.INVALID_DETECTION;
import static org.firstinspires.ftc.teamcode.util.Constants.TAG_CAMERA;
import static org.firstinspires.ftc.teamcode.util.Constants.TARGETING_WEBCAM;
import static org.firstinspires.ftc.teamcode.util.Constants.WEBCAM_HEIGHT;
import static org.firstinspires.ftc.teamcode.util.Constants.WEBCAM_ROTATION;
import static org.firstinspires.ftc.teamcode.util.Constants.WEBCAM_WIDTH;
import com.acmerobotics.roadrunner.geometry.Pose2d;
import com.qualcomm.robotcore.hardware.HardwareMap;
import com.tearabite.ielib.localization.AprilTagPoseEstimator;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.vision.VisionPortal;
import org.firstinspires.ftc.vision.apriltag.AprilTagDetection;
import org.firstinspires.ftc.vision.apriltag.AprilTagProcessor;
import org.opencv.core.Point;
import org.openftc.easyopencv.OpenCvCamera;
import org.openftc.easyopencv.OpenCvCameraFactory;
public class Camera {
private final HardwareMap hardwareMap;
private OpenCvCamera targetingCamera;
private ColorDetectionPipeline targetingPipeline;
private PropDetectionPipeline prop;
private boolean targetingCameraInitialized;
private AprilTagProcessor aprilTag;
private VisionPortal aprilTagPortal;
private VisionPortal propPortal;
private boolean initialized;
private static final Pose2d TAG_CAMERA_OFFSETS = new Pose2d(8.27, 0, 0);
private AprilTagPoseEstimator aprilTagPoseEstimator;
public static float PROP_REJECTION_VERTICAL_UPPER = 480f * 0.33f;
public static float PROP_REJECTION_VERTICAL_LOWER = 440;
// Constructor
public Camera(HardwareMap hardwareMap) {
this.hardwareMap = hardwareMap;
this.init(hardwareMap);
}
// Initiate the Targeting Camera
public void initTargetingCamera() {
int targetingCameraMonitorViewId = hardwareMap.appContext.getResources().getIdentifier("cameraMonitorViewId", "id", hardwareMap.appContext.getPackageName());
this.targetingCamera = OpenCvCameraFactory.getInstance().createWebcam(hardwareMap.get(WebcamName.class, TARGETING_WEBCAM), targetingCameraMonitorViewId);
this.targetingPipeline = new ColorDetectionPipeline();
targetingCamera.setPipeline(targetingPipeline);
targetingCamera.openCameraDeviceAsync(new OpenCvCamera.AsyncCameraOpenListener() {
@Override
public void onOpened() {
targetingCamera.startStreaming(WEBCAM_WIDTH, WEBCAM_HEIGHT, WEBCAM_ROTATION);
targetingCameraInitialized = true;
}
private void init(HardwareMap hardwareMap) {
this.aprilTagPoseEstimator = AprilTagPoseEstimator.builder()
.robotOffset(TAG_CAMERA_OFFSETS)
.build();
this.aprilTag = new AprilTagProcessor.Builder()
.setDrawAxes(true)
.setDrawCubeProjection(true)
.setDrawTagID(true)
.setDrawTagOutline(true)
.build();
int[] viewportIds = VisionPortal.makeMultiPortalView(2, VisionPortal.MultiPortalLayout.HORIZONTAL);
@Override
public void onError(int errorCode) {
VisionPortal.Builder aprilTagVisionPortalBuilder = new VisionPortal.Builder();
aprilTagVisionPortalBuilder.setCamera(hardwareMap.get(WebcamName.class, TAG_CAMERA));
aprilTagVisionPortalBuilder.setLiveViewContainerId(viewportIds[0]);
aprilTagVisionPortalBuilder.setAutoStopLiveView(true);
aprilTagVisionPortalBuilder.addProcessor(aprilTag);
this.aprilTagPortal = aprilTagVisionPortalBuilder.build();
}
});
this.prop = new PropDetectionPipeline();
VisionPortal.Builder propVisionPortalBuilder = new VisionPortal.Builder();
propVisionPortalBuilder.setCamera(hardwareMap.get(WebcamName.class, TARGETING_WEBCAM));
propVisionPortalBuilder.setLiveViewContainerId(viewportIds[1]);
propVisionPortalBuilder.setAutoStopLiveView(true);
propVisionPortalBuilder.addProcessor(prop);
this.propPortal = propVisionPortalBuilder.build();
this.propPortal.resumeLiveView();
this.aprilTagPortal.resumeLiveView();
this.initialized = true;
}
// Close the Targeting Camera
@@ -53,7 +75,7 @@ public class Camera {
// Get the Red Goal Detection
public Detection getRed() {
return targetingCameraInitialized ? targetingPipeline.getRed() : INVALID_DETECTION;
return targetingCameraInitialized ? prop.getRed() : INVALID_DETECTION;
}
public StarterPosition getStartingPosition() {
@@ -85,7 +107,7 @@
}
public Detection getBlue() {
return targetingCameraInitialized ? targetingPipeline.getBlue() : INVALID_DETECTION;
return targetingCameraInitialized ? prop.getBlue() : INVALID_DETECTION;
}
public StarterPositionBlue getStartingPositionBlue() {
@@ -112,7 +134,6 @@
}
return StarterPositionBlue.UNKNOWN;
}
@@ -124,4 +145,19 @@ public class Camera {
UNKNOWN, LEFT, CENTER, RIGHT
}
public AprilTagDetection getBestAprilTagDetection() {
return this.aprilTag.getDetections()
.stream().max((d1, d2) -> Float.compare(d1.decisionMargin, d2.decisionMargin)) // highest decisionMargin wins
.orElse(null);
}
public Pose2d estimatePoseFromAprilTag() {
AprilTagDetection detection = this.getBestAprilTagDetection();
if (detection == null) {
return null;
}
return this.aprilTagPoseEstimator.estimatePose(detection);
}
}
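Because estimatePoseFromAprilTag() feeds straight into the drive's pose estimate, TAG_CAMERA_OFFSETS is worth sanity-checking before the estimate is trusted in update(). A hedged fragment of the kind of telemetry loop that could do that, reusing only calls that appear in this commit (robot.getDrive().getPoseEstimate(), robot.getCamera().estimatePoseFromAprilTag()) plus standard LinearOpMode telemetry; it is an illustration, not part of the diff:

Pose2d odometryPose = robot.getDrive().getPoseEstimate();
Pose2d tagPose = robot.getCamera().estimatePoseFromAprilTag();
telemetry.addData("odometry", odometryPose);
telemetry.addData("aprilTag", tagPose != null ? tagPose : "no tag visible");
telemetry.update();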

View File

@@ -0,0 +1,142 @@
package org.firstinspires.ftc.teamcode.vision;
import static org.firstinspires.ftc.teamcode.util.Colors.FTC_BLUE_RANGE;
import static org.firstinspires.ftc.teamcode.util.Colors.FTC_RED_RANGE_1;
import static org.firstinspires.ftc.teamcode.util.Colors.FTC_RED_RANGE_2;
import static org.firstinspires.ftc.teamcode.util.Colors.RED;
import static org.firstinspires.ftc.teamcode.util.Colors.WHITE;
import static org.firstinspires.ftc.teamcode.util.Configurables.camerStuff.DETECTION_AREA_MAX;
import static org.firstinspires.ftc.teamcode.util.Configurables.camerStuff.DETECTION_AREA_MIN;
import static org.firstinspires.ftc.teamcode.util.Constants.ANCHOR;
import static org.firstinspires.ftc.teamcode.util.Constants.BLACK;
import static org.firstinspires.ftc.teamcode.util.Constants.BLUR_SIZE;
import static org.firstinspires.ftc.teamcode.util.Constants.ERODE_DILATE_ITERATIONS;
import static org.firstinspires.ftc.teamcode.util.Constants.STRUCTURING_ELEMENT;
import static org.firstinspires.ftc.teamcode.util.OpenCVUtil.getLargestContour;
import static org.firstinspires.ftc.teamcode.vision.Camera.PROP_REJECTION_VERTICAL_LOWER;
import static org.firstinspires.ftc.teamcode.vision.Camera.PROP_REJECTION_VERTICAL_UPPER;
import android.graphics.Canvas;
import com.tearabite.ielib.vision.ScalarRange;
import org.firstinspires.ftc.robotcore.internal.camera.calibration.CameraCalibration;
import org.firstinspires.ftc.teamcode.util.CenterStageCommon;
import org.firstinspires.ftc.vision.VisionProcessor;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Point;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import java.util.ArrayList;
public class PropDetectionPipeline implements VisionProcessor {
CenterStageCommon.Alliance alliance;
private Mat blurred = new Mat();
private Mat hsv = new Mat();
private Mat mask = new Mat();
private Mat tmpMask = new Mat();
private Detection red;
private Detection blue;
// Init
@Override
public void init(int width, int height, CameraCalibration calibration) {
this.red = new Detection(new Size(width, height), DETECTION_AREA_MIN, DETECTION_AREA_MAX);
this.blue = new Detection(new Size(width, height), DETECTION_AREA_MIN, DETECTION_AREA_MAX);
}
// Process each frame that is received from the webcam
@Override
public Object processFrame(Mat input, long captureTimeNanos) {
Imgproc.GaussianBlur(input, blurred, BLUR_SIZE, 0);
Imgproc.cvtColor(blurred, hsv, Imgproc.COLOR_RGB2HSV);
if (alliance == CenterStageCommon.Alliance.Red) {
red.setContour(detect(FTC_RED_RANGE_1, FTC_RED_RANGE_2));
}
if (alliance == CenterStageCommon.Alliance.Blue) {
blue.setContour(detect(FTC_BLUE_RANGE));
}
return input;
}
private Mat zeros;
private Mat zeros(Size size, int type) {
if (this.zeros == null) {
this.zeros = Mat.zeros(size, type);
}
return this.zeros;
}
private MatOfPoint detect(ScalarRange... colorRanges) {
mask.release();
for (ScalarRange colorRange : colorRanges) {
Core.inRange(hsv, colorRange.getLower(), colorRange.getUpper(), tmpMask);
if (mask.empty() || mask.rows() <= 0) {
Core.inRange(hsv, colorRange.getLower(), colorRange.getUpper(), mask);
}
Core.add(mask, tmpMask, mask);
}
Imgproc.rectangle(mask, new Point(0, 0), new Point(mask.cols() - 1, (int) PROP_REJECTION_VERTICAL_UPPER), BLACK, -1);
Imgproc.rectangle(mask, new Point(0, (int) PROP_REJECTION_VERTICAL_LOWER), new Point(mask.cols() - 1, mask.rows() - 1), BLACK, -1);
Imgproc.erode(mask, mask, STRUCTURING_ELEMENT, ANCHOR, ERODE_DILATE_ITERATIONS);
Imgproc.dilate(mask, mask, STRUCTURING_ELEMENT, ANCHOR, ERODE_DILATE_ITERATIONS);
ArrayList<MatOfPoint> contours = new ArrayList<>();
Imgproc.findContours(mask, contours, new Mat(), Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);
return getLargestContour(contours);
}
@Override
public void onDrawFrame(Canvas canvas, int onscreenWidth, int onscreenHeight, float scaleBmpPxToCanvasPx, float scaleCanvasDensity, Object userContext) {
canvas.drawLine(0, PROP_REJECTION_VERTICAL_LOWER, canvas.getWidth(), PROP_REJECTION_VERTICAL_LOWER, WHITE);
canvas.drawLine(0, PROP_REJECTION_VERTICAL_UPPER, canvas.getWidth(), PROP_REJECTION_VERTICAL_UPPER, WHITE);
if (red != null && red.isValid()) {
Point center = red.getCenterPx();
if (center.y < PROP_REJECTION_VERTICAL_LOWER
&& center.y > PROP_REJECTION_VERTICAL_UPPER) {
canvas.drawCircle((float) red.getCenterPx().x, (float) red.getCenterPx().y, 10, WHITE);
} else {
canvas.drawCircle((float) red.getCenterPx().x, (float) red.getCenterPx().y, 10, RED);
}
}
if (blue != null && blue.isValid()) {
Point center = blue.getCenterPx();
if (center.y < PROP_REJECTION_VERTICAL_LOWER
&& center.y > PROP_REJECTION_VERTICAL_UPPER) {
canvas.drawCircle((float) blue.getCenterPx().x, (float) blue.getCenterPx().y, 10, WHITE);
} else {
canvas.drawCircle((float) blue.getCenterPx().x, (float) blue.getCenterPx().y, 10, RED);
}
}
}
public Detection getRed() {
return this.red;
}
public Detection getBlue() {
return this.blue;
}
public void setAlliance(CenterStageCommon.Alliance alliance) {
this.alliance = alliance;
}
public CenterStageCommon.Alliance getAlliance() {
return this.alliance;
}
}
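The pipeline only reports a raw Detection; turning that into a left/center/right answer is left to the caller. A hedged sketch of how the camerStuff thresholds could drive that classification, using only the Detection accessors visible in this commit (isValid(), getCenterPx()) and Constants.WEBCAM_WIDTH, with the relevant static imports assumed; classify() is a hypothetical helper, not part of the diff:

public static CenterStageCommon.PropLocation classify(Detection detection) {
    if (detection == null || !detection.isValid()) {
        return CenterStageCommon.PropLocation.Unknown;
    }
    // normalize the pixel x-coordinate to 0..1 so it is comparable to DETECTION_*_X
    double x = detection.getCenterPx().x / (double) WEBCAM_WIDTH;
    double toLeft = Math.abs(x - DETECTION_LEFT_X);
    double toCenter = Math.abs(x - DETECTION_CENTER_X);
    double toRight = Math.abs(x - DETECTION_RIGHT_X);
    if (toLeft <= toCenter && toLeft <= toRight) {
        return CenterStageCommon.PropLocation.Left;
    }
    return toCenter <= toRight ? CenterStageCommon.PropLocation.Center : CenterStageCommon.PropLocation.Right;
}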

View File

@@ -21,6 +21,8 @@ dependencies {
runtimeOnly 'org.tensorflow:tensorflow-lite:2.12.0'
implementation 'androidx.appcompat:appcompat:1.2.0'
implementation 'com.acmerobotics.dashboard:dashboard:0.4.12'
implementation project(':ielib-core')
implementation project(':ielib')
compileOnly 'org.projectlombok:lombok:1.18.30'
annotationProcessor 'org.projectlombok:lombok:1.18.30'

View File

@@ -1,2 +1,6 @@
include ':FtcRobotController'
include ':TeamCode'
include ':ielib-core'
project(':ielib-core').projectDir = new File('../ielib-core')
include ':ielib'
project(':ielib').projectDir = new File('../ielib')