Working left auto

Scott Barnes 2023-11-18 14:38:00 -06:00
parent 3f9e39e45b
commit c35ed27a09
3 changed files with 54 additions and 40 deletions

AutoBase.java

@@ -24,7 +24,7 @@ public abstract class AutoBase extends LinearOpMode {
     public static int DEPOSIT_HEIGHT = 100;
     public static double SCORING_DURATION_S = 5f;
     public static double APRIL_TAG_RIGHT_DELTA = -8.5;
-    public static double APRIL_TAG_LEFT_DELTA = 4.0;
+    public static double APRIL_TAG_LEFT_DELTA = 8.0;
     protected Robot robot;
     protected FtcDashboard dashboard;
@@ -59,8 +59,12 @@ public abstract class AutoBase extends LinearOpMode {
         TrajectorySequenceBuilder builder;
         switch (this.propLocation) {
             case Left:
-                // TODO Tommy: Place the pixel on the left tape and move to rendezvous position
-                return;
+                dislodgePropAndPlacePixelLeft();
+                builder = this.robot.getTrajectorySequenceBuilder();
+                builder.lineToLinearHeading(rendezvous);
+                this.robot.getDrive().followTrajectorySequence(builder.build());
+                break;
             case Unknown:
             case Center:
                 dislodgePropAndPlacePixelCenter();
@@ -85,6 +89,19 @@ public abstract class AutoBase extends LinearOpMode {
         // TODO Tommy: Park
     }

+    private void dislodgePropAndPlacePixelLeft() {
+        TrajectorySequenceBuilder builder = this.robot.getTrajectorySequenceBuilder();
+        builder.lineToLinearHeading(new Pose2d(-52, 31, Math.toRadians(-180)));
+        builder.lineToConstantHeading(new Vector2d(-42, 31));
+        builder.addTemporalMarker(0.2, () -> {
+            this.robot.getClaw().setArmPosition(PICKUP_ARM_MIN);
+        });
+        this.robot.getDrive().followTrajectorySequence(builder.build());
+        this.robot.getClaw().openSync();
+        this.sleep(100);
+        this.robot.getClaw().setArmPosition(PICKUP_ARM_MAX);
+    }
+
     private void dislodgePropAndPlacePixelRight() {
         TrajectorySequenceBuilder builder = this.robot.getTrajectorySequenceBuilder();
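
A note for readers of the new left routine: it follows the same build-then-follow pattern as the existing center and right helpers, with one timing detail worth calling out. The addTemporalMarker callback fires 0.2 s after the trajectory sequence starts, so the arm is already dropping to PICKUP_ARM_MIN while the robot is still driving toward the spike mark, and the claw only opens once followTrajectorySequence returns. A minimal annotated sketch of that pattern, using the same Road Runner-style builder and claw calls as above (comments added here for explanation; the poses are the ones shown in the diff):

    // Sketch of the drive-then-place pattern used by dislodgePropAndPlacePixelLeft()
    private void dislodgePropAndPlacePixelLeftSketch() {
        TrajectorySequenceBuilder builder = this.robot.getTrajectorySequenceBuilder();
        builder.lineToLinearHeading(new Pose2d(-52, 31, Math.toRadians(-180))); // drive to the left spike mark
        builder.lineToConstantHeading(new Vector2d(-42, 31));                   // back away without turning
        builder.addTemporalMarker(0.2, () -> {
            // fires 0.2 s into the sequence: lower the arm while still moving
            this.robot.getClaw().setArmPosition(PICKUP_ARM_MIN);
        });
        this.robot.getDrive().followTrajectorySequence(builder.build()); // blocks until the path finishes
        this.robot.getClaw().openSync(); // release the pixel onto the tape
        this.sleep(100);                 // brief settle before raising the arm
        this.robot.getClaw().setArmPosition(PICKUP_ARM_MAX);
    }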

Camera.java

@@ -1,11 +1,9 @@
 package org.firstinspires.ftc.teamcode.hardware;

-import static org.firstinspires.ftc.teamcode.hardware.RobotConfig.DETECTION_CENTER_X;
-import static org.firstinspires.ftc.teamcode.hardware.RobotConfig.DETECTION_LEFT_X;
-import static org.firstinspires.ftc.teamcode.hardware.RobotConfig.DETECTION_RIGHT_X;
 import static org.firstinspires.ftc.teamcode.hardware.RobotConfig.WEBCAM_NAME;
 import static org.firstinspires.ftc.teamcode.util.Constants.INVALID_DETECTION;

+import com.acmerobotics.dashboard.config.Config;
 import com.qualcomm.robotcore.hardware.HardwareMap;
 import org.firstinspires.ftc.robotcore.external.Telemetry;
@@ -16,8 +14,11 @@ import org.firstinspires.ftc.teamcode.vision.PropDetectionPipeline;
 import org.firstinspires.ftc.vision.VisionPortal;
 import org.firstinspires.ftc.vision.apriltag.AprilTagDetection;
 import org.firstinspires.ftc.vision.apriltag.AprilTagProcessor;
+import org.opencv.core.Point;

+@Config
 public class Camera {
+    public static float PROP_REJECTION_VERTICAL = 300;
     private PropDetectionPipeline prop;
     private AprilTagProcessor aprilTag;
     private VisionPortal visionPortal;
@@ -48,39 +49,28 @@ public class Camera {
             return INVALID_DETECTION;
         }

+        Detection detection = null;
         switch (getAlliance()) {
             case Blue:
                 Detection blue = this.prop.getBlue();
-                return blue != null && blue.isValid() ? blue : INVALID_DETECTION;
+                detection = blue != null && blue.isValid() ? blue : INVALID_DETECTION;
+                break;
             case Red:
                 Detection red = this.prop.getRed();
-                return red != null && red.isValid() ? red : INVALID_DETECTION;
+                detection = red != null && red.isValid() ? red : INVALID_DETECTION;
+                break;
         }
+
+        if (detection != null && detection.isValid()) {
+            Point center = detection.getCenterPx();
+            if (center.y < PROP_REJECTION_VERTICAL) {
+                return detection;
+            }
+        }

         return INVALID_DETECTION;
     }

-    public CenterStageCommon.PropLocation getPropLocation() {
-        Detection prop = this.getProp();
-        if (prop == null || !prop.isValid()) {
-            return CenterStageCommon.PropLocation.Unknown;
-        }
-        double x = prop.getCenter().x + 50;
-        if (x <= DETECTION_LEFT_X) {
-            return CenterStageCommon.PropLocation.Left;
-        }
-        if (x <= DETECTION_CENTER_X) {
-            return CenterStageCommon.PropLocation.Center;
-        }
-        if (x <= DETECTION_RIGHT_X) {
-            return CenterStageCommon.PropLocation.Right;
-        }
-        return CenterStageCommon.PropLocation.Unknown;
-    }
-
     public AprilTagDetection getAprilTag(int id) {
         return this.aprilTag.getDetections()
                 .stream()
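
The net effect of the Camera changes: getProp() now only returns a detection whose center sits above the PROP_REJECTION_VERTICAL line (image y grows downward, so a larger y means lower in the frame), and the new @Config annotation lets that threshold be tuned live from FTC Dashboard instead of requiring a rebuild. A minimal sketch of the acceptance rule, written as a hypothetical helper (isAcceptedProp does not exist in this commit):

    // Hypothetical helper: a prop detection counts only if it is valid and
    // its center is above the horizontal rejection line.
    private static boolean isAcceptedProp(Detection detection) {
        return detection != null
                && detection.isValid()
                && detection.getCenterPx().y < PROP_REJECTION_VERTICAL;
    }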

PropDetectionPipeline.java

@@ -1,5 +1,6 @@
 package org.firstinspires.ftc.teamcode.vision;

+import static org.firstinspires.ftc.teamcode.hardware.Camera.PROP_REJECTION_VERTICAL;
 import static org.firstinspires.ftc.teamcode.hardware.RobotConfig.DETECTION_AREA_MAX;
 import static org.firstinspires.ftc.teamcode.hardware.RobotConfig.DETECTION_AREA_MIN;
 import static org.firstinspires.ftc.teamcode.hardware.RobotConfig.DETECTION_CENTER_X;
@@ -8,6 +9,7 @@ import static org.firstinspires.ftc.teamcode.hardware.RobotConfig.DETECTION_RIGHT_X;
 import static org.firstinspires.ftc.teamcode.util.Colors.FTC_BLUE_RANGE;
 import static org.firstinspires.ftc.teamcode.util.Colors.FTC_RED_RANGE_1;
 import static org.firstinspires.ftc.teamcode.util.Colors.FTC_RED_RANGE_2;
+import static org.firstinspires.ftc.teamcode.util.Colors.RED;
 import static org.firstinspires.ftc.teamcode.util.Colors.WHITE;
 import static org.firstinspires.ftc.teamcode.util.Constants.ANCHOR;
 import static org.firstinspires.ftc.teamcode.util.Constants.BLUR_SIZE;
@@ -24,6 +26,7 @@ import org.firstinspires.ftc.vision.VisionProcessor;
 import org.opencv.core.Core;
 import org.opencv.core.Mat;
 import org.opencv.core.MatOfPoint;
+import org.opencv.core.Point;
 import org.opencv.core.Size;
 import org.opencv.imgproc.Imgproc;
@@ -99,20 +102,24 @@ public class PropDetectionPipeline implements VisionProcessor {
     @Override
     public void onDrawFrame(Canvas canvas, int onscreenWidth, int onscreenHeight, float scaleBmpPxToCanvasPx, float scaleCanvasDensity, Object userContext) {
-        int detectionLeftXPx = (int)((DETECTION_LEFT_X / 100) * onscreenWidth);
-        int detectionCenterXPx = (int)((DETECTION_CENTER_X / 100) * onscreenWidth);
-        int detectionRightXPx = (int)((DETECTION_RIGHT_X / 100) * onscreenWidth);
-        canvas.drawLine(detectionLeftXPx, 0, detectionLeftXPx, canvas.getHeight(), WHITE);
-        canvas.drawLine(detectionCenterXPx, 0, detectionCenterXPx, canvas.getHeight(), WHITE);
-        canvas.drawLine(detectionRightXPx, 0, detectionRightXPx, canvas.getHeight(), WHITE);
-        if (red.isValid()) {
+        canvas.drawLine(0, PROP_REJECTION_VERTICAL, canvas.getWidth(), PROP_REJECTION_VERTICAL, WHITE);
+
+        if (red != null && red.isValid()) {
+            Point center = red.getCenterPx();
+            if (center.y < PROP_REJECTION_VERTICAL) {
                 canvas.drawCircle((float)red.getCenterPx().x, (float)red.getCenterPx().y, 10, WHITE);
+            } else {
+                canvas.drawCircle((float)red.getCenterPx().x, (float)red.getCenterPx().y, 10, RED);
+            }
         }
-        if (blue.isValid()) {
+        if (blue != null && blue.isValid()) {
+            Point center = blue.getCenterPx();
+            if (center.y < PROP_REJECTION_VERTICAL) {
                 canvas.drawCircle((float)blue.getCenterPx().x, (float)blue.getCenterPx().y, 10, WHITE);
+            } else {
+                canvas.drawCircle((float)blue.getCenterPx().x, (float)blue.getCenterPx().y, 10, RED);
+            }
         }
     }
 }
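
The overlay change color-codes the same rule on the dashboard camera preview: the white horizontal line marks PROP_REJECTION_VERTICAL, detections above it are circled with the WHITE paint (accepted by Camera.getProp()), and detections below it are circled with the RED paint (ignored). The two branches could also be collapsed into a single paint selection; a small sketch, assuming WHITE and RED are android.graphics.Paint objects as the existing drawLine/drawCircle calls imply:

    // Sketch only: choose the paint from the threshold test, then draw once.
    Point center = red.getCenterPx();
    Paint paint = center.y < PROP_REJECTION_VERTICAL ? WHITE : RED; // above the line = accepted
    canvas.drawCircle((float) center.x, (float) center.y, 10, paint);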