auto stuff

ImposterVarunoverlord 2024-01-08 17:51:48 -06:00
parent 7691939779
commit 769d9e3f0e
20 changed files with 585 additions and 251 deletions

View File

@@ -41,10 +41,10 @@ public class Arm {
}
public Arm(HardwareMap hardwareMap) {
wristServo = hardwareMap.get(Servo.class, "Wrist Servo");
doorServo = hardwareMap.get(Servo.class, "Door Servo");
lAServo = hardwareMap.get(Servo.class, "Left Arm Servo");
rAServo = hardwareMap.get(Servo.class, "Right Arm Servo ");
wristServo = hardwareMap.get(Servo.class, "Wrist");
doorServo = hardwareMap.get(Servo.class, "Door");
lAServo = hardwareMap.get(Servo.class, "LeftArm");
rAServo = hardwareMap.get(Servo.class, "RightArm");
// lAServo.setDirection(Servo.Direction.REVERSE);
rAServo.setDirection(Servo.Direction.REVERSE);
doorServo.setDirection(Servo.Direction.REVERSE);

View File

@@ -1,129 +1,96 @@
package org.firstinspires.ftc.teamcode.hardware;
import static org.firstinspires.ftc.teamcode.util.Constants.INVALID_DETECTION;
import static org.firstinspires.ftc.teamcode.util.Constants.TARGETING_WEBCAM;
import static org.firstinspires.ftc.teamcode.util.Constants.WEBCAM_HEIGHT;
import static org.firstinspires.ftc.teamcode.util.Constants.WEBCAM_ROTATION;
import static org.firstinspires.ftc.teamcode.util.Constants.WEBCAM_WIDTH;
import static org.firstinspires.ftc.teamcode.util.Constants.WEBCAM_NAME;
import com.acmerobotics.dashboard.config.Config;
import com.qualcomm.robotcore.hardware.HardwareMap;
import org.firstinspires.ftc.robotcore.external.Telemetry;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.teamcode.util.CameraPosition;
import org.firstinspires.ftc.teamcode.vision.AprilTagDetectionPipeline;
import org.firstinspires.ftc.teamcode.util.CenterStageCommon;
import org.firstinspires.ftc.teamcode.vision.Detection;
import org.firstinspires.ftc.teamcode.vision.TargetingPipeline;
import org.openftc.apriltag.AprilTagDetection;
import org.openftc.easyopencv.OpenCvCamera;
import org.openftc.easyopencv.OpenCvCameraFactory;
import org.firstinspires.ftc.teamcode.vision.PropDetectionPipeline;
import org.firstinspires.ftc.vision.VisionPortal;
import org.firstinspires.ftc.vision.apriltag.AprilTagDetection;
import org.firstinspires.ftc.vision.apriltag.AprilTagProcessor;
import com.acmerobotics.dashboard.FtcDashboard;
import java.util.ArrayList;
// Class for the camera
@Config
public class Camera {
private HardwareMap hardwareMap;
private OpenCvCamera targetingCamera;
private TargetingPipeline targetingPipeline;
private boolean targetingCameraInitialized;
public static float PROP_REJECTION_VERTICAL_UPPER = 175;
public static float PROP_REJECTION_VERTICAL_LOWER = 300;
private PropDetectionPipeline prop;
private AprilTagProcessor aprilTag;
private VisionPortal visionPortal;
private Telemetry telemetry;
private float decimation;
private boolean needToSetDecimation;
int numFramesWithoutDetection = 0;
private boolean signalWebcamInitialized;
private OpenCvCamera signalWebcam;
AprilTagDetectionPipeline aprilTagDetectionPipeline;
ArrayList<AprilTagDetection> detections;
static final double FEET_PER_METER = 3.28084;
public CameraPosition cameraPosition;
private final Object decimationSync = new Object();
final float DECIMATION_HIGH = 3;
final float DECIMATION_LOW = 2;
final float THRESHOLD_HIGH_DECIMATION_RANGE_METERS = 1.0f;
final int THRESHOLD_NUM_FRAMES_NO_DETECTION_BEFORE_LOW_DECIMATION = 4;
private boolean initialized;
// Constructor
public Camera(HardwareMap hardwareMap) {
this.hardwareMap = hardwareMap;
this.telemetry = telemetry;
this.init(hardwareMap);
}
// Initiate the Targeting Camera
public void initTargetingCamera() {
int targetingCameraMonitorViewId = hardwareMap.appContext.getResources().getIdentifier("cameraMonitorViewId", "id", hardwareMap.appContext.getPackageName());
this.targetingCamera = OpenCvCameraFactory.getInstance().createWebcam(hardwareMap.get(WebcamName.class, TARGETING_WEBCAM), targetingCameraMonitorViewId);
this.targetingPipeline = new TargetingPipeline();
targetingCamera.setPipeline(targetingPipeline);
targetingCamera.openCameraDeviceAsync(new OpenCvCamera.AsyncCameraOpenListener() {
@Override
public void onOpened() {
targetingCamera.startStreaming(WEBCAM_WIDTH, WEBCAM_HEIGHT, WEBCAM_ROTATION);
targetingCameraInitialized = true;
FtcDashboard.getInstance().startCameraStream(signalWebcam, 0);
}
@Override
public void onError(int errorCode) {
}
});
private void init(HardwareMap hardwareMap) {
this.aprilTag = new AprilTagProcessor.Builder()
.setDrawAxes(true)
.setDrawCubeProjection(true)
.setDrawTagID(true)
.setDrawTagOutline(true)
.build();
this.prop = new PropDetectionPipeline();
this.visionPortal = VisionPortal.easyCreateWithDefaults(
hardwareMap.get(WebcamName.class, WEBCAM_NAME), aprilTag, prop);
this.initialized = true;
}
// Close the Targeting Camera
public void stopTargetingCamera() {
if (targetingCameraInitialized) {
targetingCamera.closeCameraDeviceAsync(() -> targetingCameraInitialized = false);
public Detection getProp() {
if (!initialized || getAlliance() == null) {
return INVALID_DETECTION;
}
}
// detect colors
public Detection getRed() {
return (targetingCameraInitialized ? targetingPipeline.getRed() : INVALID_DETECTION);
}
public Detection getBlue() {
return (targetingCameraInitialized ? targetingPipeline.getBlue() : INVALID_DETECTION);
}
// return frame count
public int getFrameCount() {
if (targetingCameraInitialized) {
return targetingCamera.getFrameCount();
} else {
return 0;
switch (getAlliance()) {
case Blue:
Detection blue = this.prop.getBlue();
return blue != null && blue.isValid() ? blue : INVALID_DETECTION;
case Red:
Detection red = this.prop.getRed();
return red != null && red.isValid() ? red : INVALID_DETECTION;
}
return INVALID_DETECTION;
}
public int getMarkerId() {
detections = aprilTagDetectionPipeline.getLatestDetections();
public AprilTagDetection getAprilTag(int id) {
return this.aprilTag.getDetections()
.stream()
.filter(x -> x.id == id)
.findFirst()
.orElse(null);
}
// If there's been a new frame...
if (detections != null) {
// If we don't see any tags
if (detections.size() == 0) {
numFramesWithoutDetection++;
// If we haven't seen a tag for a few frames, lower the decimation
// so we can hopefully pick one up if we're e.g. far back
if (numFramesWithoutDetection >= THRESHOLD_NUM_FRAMES_NO_DETECTION_BEFORE_LOW_DECIMATION) {
aprilTagDetectionPipeline.setDecimation(DECIMATION_LOW);
public double getDistanceToAprilTag(int id, double rejectAbove, double rejectBelow) {
for (int i = 0; i < 10; i++) {
AprilTagDetection aprilTagDetection = getAprilTag(id);
if (aprilTagDetection != null) {
if (aprilTagDetection.ftcPose.y < rejectAbove
&& aprilTagDetection.ftcPose.y > rejectBelow) {
return aprilTagDetection.ftcPose.y;
}
}
// We do see tags!
else {
numFramesWithoutDetection = 0;
// If the target is within 1 meter, turn on high decimation to
// increase the frame rate
if (detections.get(0).pose.z < THRESHOLD_HIGH_DECIMATION_RANGE_METERS) {
aprilTagDetectionPipeline.setDecimation(DECIMATION_HIGH);
}
return detections.get(0).id;
}
}
return -1;
return Double.MAX_VALUE;
}
public void setAlliance(CenterStageCommon.Alliance alliance) {
this.prop.setAlliance(alliance);
}
public CenterStageCommon.Alliance getAlliance() {
return this.prop != null ? this.prop.getAlliance() : null;
}
public boolean isCameraReady() {
return this.visionPortal.getCameraState() == VisionPortal.CameraState.CAMERA_DEVICE_READY;
}
}
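For context, a minimal sketch of how an op mode might exercise the reworked Camera API (setAlliance, getProp, getAprilTag, getDistanceToAprilTag). This op mode is not part of the commit; the tag id, the distance bounds, and the roughly -50..50 center convention are assumptions taken from the AutoBase comments further down.
package org.firstinspires.ftc.teamcode.opmode.test;

import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import org.firstinspires.ftc.teamcode.hardware.Camera;
import org.firstinspires.ftc.teamcode.util.CenterStageCommon;
import org.firstinspires.ftc.teamcode.vision.Detection;
import org.firstinspires.ftc.vision.apriltag.AprilTagDetection;

// Hypothetical smoke-test op mode (not part of this commit).
@TeleOp(name = "Camera Smoke Test", group = "Test")
public class CameraSmokeTest extends LinearOpMode {
    @Override
    public void runOpMode() {
        Camera camera = new Camera(hardwareMap);
        // getProp() returns INVALID_DETECTION until an alliance is set.
        camera.setAlliance(CenterStageCommon.Alliance.Red);
        waitForStart();
        while (opModeIsActive()) {
            Detection prop = camera.getProp();
            if (prop.isValid()) {
                telemetry.addData("Prop center x", prop.getCenter().x); // roughly -50..50
            }
            AprilTagDetection tag = camera.getAprilTag(5); // tag id 5 is an arbitrary example
            if (tag != null) {
                telemetry.addData("Tag y (in)", tag.ftcPose.y);
            }
            // Retries up to 10 frames internally; Double.MAX_VALUE means nothing passed the bounds.
            telemetry.addData("Distance to tag 5", camera.getDistanceToAprilTag(5, 60, 6));
            telemetry.update();
        }
    }
}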

View File

@@ -34,15 +34,15 @@ public class Intake {
public static double stack2 = 0.03;
public static double stack3 = 0.06;
public static double stack4 = 0.09;
public static double stack5 = 0.12;
public static double stack5 = 0.1;
public static double up = 0.30;
public static double motorPower = 0;
public Intake(HardwareMap hardwareMap, Position up) {
lServo = hardwareMap.get(Servo.class, "Left Intake Servo");
lServo = hardwareMap.get(Servo.class, "LeftIntake");
lServo.setDirection(Servo.Direction.REVERSE);
rServo = hardwareMap.get(Servo.class, "Right Intake Servo");
dcMotor = hardwareMap.get(DcMotor.class, "Intake Motor");
dcMotor = hardwareMap.get(DcMotor.class, "Intakemotor");
}
public void setpos(Position stack) {

View File

@@ -34,7 +34,6 @@ public class Robot {
public Robot(HardwareMap hardwareMap) {
drive = new SampleMecanumDrive(hardwareMap);
camera = new Camera(hardwareMap);
camera.initTargetingCamera();
intake = new Intake(hardwareMap, UP);
slides = new Slides(hardwareMap);
arm = new Arm(hardwareMap);

View File

@@ -31,18 +31,18 @@ public class Slides {
private int target = 0;
public static int manualSpeed = 20;
public static int manualSpeed = 50;
public enum Position { DOWN, TIER1, TIER2, TIER3 }
public Slides(HardwareMap hardwareMap) {
slide = hardwareMap.get(DcMotor.class, "Right Slide Motor");
slide = hardwareMap.get(DcMotor.class, "Rightslide");
slide.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
slide.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
slide.setDirection(DcMotorSimple.Direction.REVERSE);
slide.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);
slide2 = hardwareMap.get(DcMotor.class, "Left Slide Motor");
slide2 = hardwareMap.get(DcMotor.class, "Leftslide");
slide2.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
slide2.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
// slide2.setDirection(DcMotorSimple.Direction.REVERSE);

View File

@@ -9,20 +9,20 @@ public class endGame_Mechs {
private Servo servo;
private Servo hang1;
private Servo hang2;
public static double initPos = 0;
public static double launchPos = 0.5;
public static double hold = 0;
public static double initPos = 0.4;
public static double launchPos = 0.8;
public static double hold = 0.8;
public static double release = 0.5;
public static double hold2 = 0;
public static double release2 = 0.5;
public static double hold2 = 0.8;
public static double release2 = 0.8;
public endGame_Mechs(HardwareMap hardwareMap) {
this.servo = hardwareMap.get(Servo.class, "Drone Launcher");
this.servo = hardwareMap.get(Servo.class, "Drone");
this.servo.setPosition(initPos);
this.hang1 = hardwareMap.get(Servo.class, "Hanger 1");
this.hang1.setPosition(hold);
this.hang2 = hardwareMap.get(Servo.class, "Hanger 2");
this.hang2.setPosition(hold);
// this.hang1 = hardwareMap.get(Servo.class, "Hanger 1");
// this.hang1.setPosition(hold);
// this.hang2 = hardwareMap.get(Servo.class, "Hanger 2");
// this.hang2.setPosition(hold);
}

View File

@@ -1,12 +1,15 @@
package org.firstinspires.ftc.teamcode.opmode.autonomous;
import com.acmerobotics.dashboard.FtcDashboard;
import com.acmerobotics.dashboard.config.Config;
import com.acmerobotics.dashboard.telemetry.MultipleTelemetry;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import org.firstinspires.ftc.teamcode.hardware.Robot;
import org.firstinspires.ftc.teamcode.util.CenterStageCommon;
import org.firstinspires.ftc.teamcode.vision.Detection;
@Config
public abstract class AutoBase extends LinearOpMode {
public Robot robot;
@@ -18,6 +21,7 @@ public abstract class AutoBase extends LinearOpMode {
public abstract void createTrajectories();
public abstract void followTrajectories();
public static int DetectionTest = 20;
@Override
public void runOpMode() {
// create telemetry object for both driver hub and dashboard
@@ -26,19 +30,33 @@ public abstract class AutoBase extends LinearOpMode {
// initialize robot
robot = new Robot(hardwareMap);
robot.camera.setAlliance(CenterStageCommon.Alliance.Red);
// create trajectories
createTrajectories();
// wait for start
while (!isStarted() && !isStopRequested()) {
if (robot.camera.getFrameCount() < 1) {
telemetry.addLine("Initializing...");
} else {
// if (!robot.camera.isCameraReady()) {
// telemetry.addLine("Initializing...");
// } else {
telemetry.addLine("Initialized");
// Detection detection = robot.camera.getProp(); <- returns a Detection
// double x = detection.getCenter().x; <- x position on screen, roughly between -50 and 50
double PropDetection = robot.camera.getProp().getCenter().x;
if (PropDetection <= -DetectionTest ) {
teamPropLocation = 1;
} else if (PropDetection >= -DetectionTest && PropDetection <= DetectionTest) {
teamPropLocation = 2;
} else if (PropDetection >= DetectionTest) {
teamPropLocation = 3;
}
// teamPropLocation = 1,2,3
//teamPropLocation = robot.camera.getMarkerId(); // or whatever method you end up using
telemetry.addData("Team Prop Location", teamPropLocation);
}
// }
telemetry.update();
}
@@ -57,6 +75,6 @@ public abstract class AutoBase extends LinearOpMode {
}
// stop camera
robot.camera.stopTargetingCamera();
//robot.camera.stopTargetingCamera();
}
}
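The prop-location thresholding in the init loop above could be pulled into a small helper on AutoBase; a sketch, assuming getCenter().x runs roughly from -50 to 50 as the comments suggest (classifyPropLocation is a hypothetical name, not in this commit):
// Hypothetical helper: same thresholds as the init-loop code above.
private int classifyPropLocation(double centerX) {
    if (centerX <= -DetectionTest) {
        return 1; // left
    } else if (centerX <= DetectionTest) {
        return 2; // center
    } else {
        return 3; // right
    }
}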

View File

@@ -0,0 +1,168 @@
package org.firstinspires.ftc.teamcode.opmode.autonomous;
import com.acmerobotics.roadrunner.geometry.Pose2d;
import com.acmerobotics.roadrunner.geometry.Vector2d;
import com.acmerobotics.roadrunner.trajectory.Trajectory;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import org.firstinspires.ftc.teamcode.hardware.Slides;
@Autonomous(name = "Blue Backstage Auto", group = "Competition", preselectTeleOp = "Main TeleOp")
public class BlueBackStageAuto extends AutoBase {
public Trajectory scorePurple1;
public Trajectory scorePurple2;
public Trajectory scorePurple3;
public Trajectory scoreYellow1;
public Trajectory scoreYellow2;
public Trajectory scoreYellow3;
public Trajectory parkRobot1;
public Trajectory parkRobot2;
public Trajectory parkRobot3;
public Trajectory stackrun1b1;
public Trajectory stackrun1b2;
public Trajectory stackrun1b3;
@Override
public void createTrajectories() {
// set start position
Pose2d start = new Pose2d(12, 61.5, Math.toRadians(-90));
robot.drive.setPoseEstimate(start);
// create pose2d variables
// You might not need three instances of the deposit position; however, depending on localization accuracy,
// each one may need to be slightly different.
Pose2d drop1 = new Pose2d(12, 39.5, Math.toRadians(-90));
Pose2d drop2 = new Pose2d(12, 39.5, Math.toRadians(-90));
Pose2d drop3 = new Pose2d(12, 39.5, Math.toRadians(-90));
Pose2d depositPreload1 = new Pose2d(50.5, 32, Math.toRadians(-187));
Pose2d depositPreload2 = new Pose2d(50.5, 32, Math.toRadians(-187));
Pose2d depositPreload3 = new Pose2d(50.5, 32, Math.toRadians(-187));
Pose2d park1 = new Pose2d(48, 12, Math.toRadians(0));
Pose2d park2 = new Pose2d(48, 12, Math.toRadians(0));
Pose2d park3 = new Pose2d(48, 12, Math.toRadians(0));
Pose2d stack_1x1 = new Pose2d(-56, 12, Math.toRadians(0));
Pose2d stack_2x1 = new Pose2d(-56, 12, Math.toRadians(0));
Pose2d stack_3x1 = new Pose2d(-56, 12, Math.toRadians(0));
// create trajectories
scorePurple1 = robot.drive.trajectoryBuilder(start)
.lineToLinearHeading(drop1)
.build();
scorePurple2 = robot.drive.trajectoryBuilder(start)
.lineToLinearHeading(drop2)
.build();
scorePurple3 = robot.drive.trajectoryBuilder(start)
.lineToLinearHeading(drop3)
.build();
scoreYellow1 = robot.drive.trajectoryBuilder(scorePurple1.end())
.lineToLinearHeading(depositPreload1)
.build();
scoreYellow2 = robot.drive.trajectoryBuilder(scorePurple2.end())
.lineToLinearHeading(depositPreload2)
.build();
scoreYellow3 = robot.drive.trajectoryBuilder(scorePurple3.end())
.lineToLinearHeading(depositPreload3)
.build();
parkRobot1 = robot.drive.trajectoryBuilder(scoreYellow1.end())
.lineToLinearHeading(park1)
.build();
parkRobot2 = robot.drive.trajectoryBuilder(scoreYellow2.end())
.lineToLinearHeading(park2)
.build();
parkRobot3 = robot.drive.trajectoryBuilder(scoreYellow3.end())
.lineToLinearHeading(park3)
.build();
stackrun1b1 = robot.drive.trajectoryBuilder(scoreYellow1.end())
.splineToConstantHeading(new Vector2d(30, 12), Math.toRadians(0))
.lineToLinearHeading(stack_1x1)
.build();
stackrun1b2 = robot.drive.trajectoryBuilder(scoreYellow2.end())
.splineToConstantHeading(new Vector2d(30, 12), Math.toRadians(0))
.lineToLinearHeading(stack_2x1)
.build();
stackrun1b3 = robot.drive.trajectoryBuilder(scoreYellow3.end())
.splineToConstantHeading(new Vector2d(30, 12), Math.toRadians(0))
.lineToLinearHeading(stack_3x1)
.build();
}
@Override
public void followTrajectories() {
switch (macroState) {
case 0:
robot.drive.followTrajectoryAsync(teamPropLocation==1?scorePurple1:(teamPropLocation==2?scorePurple2:scorePurple3));
macroState++;
break;
// DRIVE TO TAPE
case 1:
// if drive is done move on
if (!robot.drive.isBusy()) {
macroTime = getRuntime();
macroState++;
}
break;
// RUN INTAKE
case 2:
// intake
if (getRuntime() < macroTime + 0.5) {
robot.intake.setDcMotor(-0.26);
}
// if intake is done move on
else {
robot.intake.setDcMotor(0);
robot.extendMacro(Slides.mini_tier1, getRuntime());
robot.drive.followTrajectoryAsync(teamPropLocation==1?scoreYellow1:(teamPropLocation==2?scoreYellow2:scoreYellow3));
macroState++;
}
break;
// EXTEND AND MOVE TO BACKBOARD
case 3:
// extend macro
if (robot.macroState != 0) {
robot.extendMacro(Slides.mini_tier1, getRuntime());
}
// if macro and drive are done, move on
if (robot.macroState == 0 && !robot.drive.isBusy()) {
robot.resetMacro(0, getRuntime());
macroState++;
}
break;
case 4:
robot.resetMacro(0, getRuntime());
if (robot.macroState >= 2){
// robot.drive.followTrajectoryAsync(teamPropLocation==1?parkRobot1:(teamPropLocation==2?parkRobot2:parkRobot3));
robot.drive.followTrajectoryAsync(stackrun1b2);
macroState++;
}
break;
case 5:
if (!robot.drive.isBusy()) {
macroState = -1;
}
//macroState++;
break;
// PARK ROBOT
// case 6:
// // reset macro
// if (robot.macroState != 0) {
// robot.resetMacro(0, getRuntime());
// }
// // if macro and drive are done, end auto
// if (robot.macroState == 0 && !robot.drive.isBusy()) {
// macroState=-1;
// }
// break;
}
}
}

View File

@@ -1,11 +1,15 @@
package org.firstinspires.ftc.teamcode.opmode.autonomous;
import static org.firstinspires.ftc.teamcode.hardware.Intake.Position.STACK4;
import static org.firstinspires.ftc.teamcode.hardware.Intake.Position.STACK5;
import com.acmerobotics.roadrunner.geometry.Pose2d;
import com.acmerobotics.roadrunner.geometry.Vector2d;
import com.acmerobotics.roadrunner.trajectory.Trajectory;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import org.firstinspires.ftc.teamcode.hardware.Slides;
import org.firstinspires.ftc.teamcode.roadrunner.trajectorysequence.TrajectorySequenceBuilder;
@Autonomous(name = "Red Backstage Auto", group = "Competition", preselectTeleOp = "Main TeleOp")
public class RedBackStageAuto extends AutoBase {
@@ -25,6 +29,10 @@ public class RedBackStageAuto extends AutoBase {
public Trajectory stackrun1b2;
public Trajectory stackrun1b3;
public Trajectory returnstackrun1b1;
public Trajectory returnstackrun1b2;
public Trajectory returnstackrun1b3;
@Override
public void createTrajectories() {
// set start position
@@ -34,21 +42,23 @@ public class RedBackStageAuto extends AutoBase {
// create pose2d variables
// You might not need three instances of the deposit position; however, depending on localization accuracy,
// each one may need to be slightly different.
Pose2d drop1 = new Pose2d(12, -39.5, Math.toRadians(90));
Pose2d drop2 = new Pose2d(12, -39.5, Math.toRadians(90));
Pose2d drop3 = new Pose2d(12, -39.5, Math.toRadians(90));
Pose2d drop1 = new Pose2d(12, -37.5, Math.toRadians(90));
Pose2d drop2 = new Pose2d(12, -37.5, Math.toRadians(90));
Pose2d drop3 = new Pose2d(12, -37.5, Math.toRadians(90));
Pose2d depositPreload1 = new Pose2d(50.5, -32, Math.toRadians(187));
Pose2d depositPreload2 = new Pose2d(50.5, -32, Math.toRadians(187));
Pose2d depositPreload3 = new Pose2d(50.5, -32, Math.toRadians(187));
Pose2d depositPreload1 = new Pose2d(52.5, -32, Math.toRadians(180));
Pose2d depositPreload2 = new Pose2d(52.5, -32, Math.toRadians(180));
Pose2d depositPreload3 = new Pose2d(52.5, -32, Math.toRadians(180));
Pose2d park1 = new Pose2d(48, -12, Math.toRadians(180));
Pose2d park2 = new Pose2d(48, -12, Math.toRadians(180));
Pose2d park3 = new Pose2d(48, -12, Math.toRadians(180));
// Pose2d park1 = new Pose2d(48, -12, Math.toRadians(180));
// Pose2d park2 = new Pose2d(48, -12, Math.toRadians(180));
// Pose2d park3 = new Pose2d(48, -12, Math.toRadians(180));
//
// Pose2d toStack = new Pose2d(40,-36, Math.toRadians(180));
Pose2d stack_1x1 = new Pose2d(-56, -12, Math.toRadians(180));
Pose2d stack_2x1 = new Pose2d(-56, -12, Math.toRadians(180));
Pose2d stack_3x1 = new Pose2d(-56, -12, Math.toRadians(180));
Pose2d stack_1x1 = new Pose2d(-56, -11, Math.toRadians(180));//-36
Pose2d stack_2x1 = new Pose2d(-56, -11, Math.toRadians(180));
Pose2d stack_3x1 = new Pose2d(-56, -11, Math.toRadians(180));
// create trajectories
scorePurple1 = robot.drive.trajectoryBuilder(start)
@@ -71,27 +81,41 @@ public class RedBackStageAuto extends AutoBase {
.lineToLinearHeading(depositPreload3)
.build();
parkRobot1 = robot.drive.trajectoryBuilder(scoreYellow1.end())
.lineToLinearHeading(park1)
.build();
parkRobot2 = robot.drive.trajectoryBuilder(scoreYellow2.end())
.lineToLinearHeading(park2)
.build();
parkRobot3 = robot.drive.trajectoryBuilder(scoreYellow3.end())
.lineToLinearHeading(park3)
.build();
// parkRobot1 = robot.drive.trajectoryBuilder(scoreYellow1.end())
// .lineToLinearHeading(park1)
// .build();
// parkRobot2 = robot.drive.trajectoryBuilder(scoreYellow2.end())
// .lineToLinearHeading(park2)
// .build();
// parkRobot3 = robot.drive.trajectoryBuilder(scoreYellow3.end())
// .lineToLinearHeading(park3)
// .build();
stackrun1b1 = robot.drive.trajectoryBuilder(scoreYellow1.end())
.splineToConstantHeading(new Vector2d(30, -12), Math.toRadians(180))
.lineToLinearHeading(stack_1x1)
.splineToConstantHeading(new Vector2d(12, -10.5), Math.toRadians(180))
.lineToConstantHeading(stack_1x1.vec())
.build();
stackrun1b2 = robot.drive.trajectoryBuilder(scoreYellow2.end())
.splineToConstantHeading(new Vector2d(30, -12), Math.toRadians(180))
.splineToConstantHeading(new Vector2d(12, -10.5), Math.toRadians(180))
.lineToLinearHeading(stack_2x1)
.build();
stackrun1b3 = robot.drive.trajectoryBuilder(scoreYellow1.end())
.splineToConstantHeading(new Vector2d(30, -12), Math.toRadians(180))
stackrun1b3 = robot.drive.trajectoryBuilder(scoreYellow3.end())
.splineToConstantHeading(new Vector2d(12, -10.5), Math.toRadians(180))
.lineToLinearHeading(stack_3x1)
.build();
returnstackrun1b1 = robot.drive.trajectoryBuilder(stackrun1b1.end())
.lineToLinearHeading(stack_1x1)
.splineToConstantHeading(new Vector2d(12, -10.5), Math.toRadians(180))
.build();
returnstackrun1b2 = robot.drive.trajectoryBuilder(stackrun1b2.end())
.lineToLinearHeading(stack_2x1)
.splineToConstantHeading(new Vector2d(12, -10.5), Math.toRadians(180))
.build();
returnstackrun1b3 = robot.drive.trajectoryBuilder(stackrun1b3.end())
.lineToLinearHeading(stack_3x1)
.splineToConstantHeading(new Vector2d(12, -10.5), Math.toRadians(180))
.build();
}
@@ -114,7 +138,7 @@ public class RedBackStageAuto extends AutoBase {
case 2:
// intake
if (getRuntime() < macroTime + 0.5) {
robot.intake.setDcMotor(-0.26);
robot.intake.setDcMotor(-0.46);
}
// if intake is done move on
else {
@@ -138,20 +162,35 @@ public class RedBackStageAuto extends AutoBase {
break;
case 4:
robot.resetMacro(0, getRuntime());
if (robot.macroState >= 2){
// robot.drive.followTrajectoryAsync(teamPropLocation==1?parkRobot1:(teamPropLocation==2?parkRobot2:parkRobot3));
robot.drive.followTrajectoryAsync(stackrun1b2);
if (robot.macroState >= 4){
robot.drive.followTrajectoryAsync(teamPropLocation==1?stackrun1b1:(teamPropLocation==2?stackrun1b2:stackrun1b3));
macroState++;
}
break;
case 5:
robot.resetMacro(0, getRuntime());
if (!robot.drive.isBusy()) {
macroState++;
}
//macroState++;
break;
case 6:
robot.intake.setDcMotor(0.46);
robot.intake.setpos(STACK5);
macroTime = getRuntime();
macroState++;
break;
case 7:
if (getRuntime() > macroTime + 1.5) {
robot.drive.followTrajectoryAsync(returnstackrun1b1);
macroState++;
}
break;
case 5:
if(!robot.drive.isBusy()){
macroState =-1;
}
//macroState++;
break;
case 8:
macroState = -1;
// PARK ROBOT
// case 6:
// // reset macro

View File

@@ -34,10 +34,10 @@ public class MainTeleOp extends OpMode {
this.robot = new Robot(hardwareMap);
// robot.intake.setpos(Intake.Position.STACK1);
this.robot.endGameMechs.hold();
while (robot.camera.getFrameCount() < 1) {
telemetry.addLine("Initializing...");
telemetry.update();
}
// while (robot.camera.getFrameCount() < 1) {
// telemetry.addLine("Initializing...");
// telemetry.update();
// }
telemetry.addLine("Initialized");
telemetry.update();

View File

@@ -104,10 +104,10 @@ public class SampleMecanumDrive extends MecanumDrive {
// BNO055IMUUtil.remapZAxis(imu, AxisDirection.NEG_Y);
rightRear = hardwareMap.get(DcMotorEx.class, "backRight");
rightFront = hardwareMap.get(DcMotorEx.class, "frontRight");
leftFront = hardwareMap.get(DcMotorEx.class, "frontLeft");
leftRear = hardwareMap.get(DcMotorEx.class, "backLeft");
rightRear = hardwareMap.get(DcMotorEx.class, "BackRight");
rightFront = hardwareMap.get(DcMotorEx.class, "FrontRight");
leftFront = hardwareMap.get(DcMotorEx.class, "FrontLeft");
leftRear = hardwareMap.get(DcMotorEx.class, "BackLeft");
motors = Arrays.asList(leftFront, leftRear, rightRear, rightFront);

View File

@@ -61,9 +61,9 @@ public class TwoWheelTrackingLocalizer extends TwoTrackingWheelLocalizer {
this.drive = drive;
parallelEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, "backLeft"));
parallelEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, "BackLeft"));
// parallelEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, "backRight"));
perpendicularEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, "frontLeft"));
perpendicularEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, "FrontLeft"));
// TODO: reverse any encoders using Encoder.setDirection(Encoder.Direction.REVERSE)
perpendicularEncoder.setDirection(Encoder.Direction.REVERSE);

View File

@@ -0,0 +1,6 @@
package org.firstinspires.ftc.teamcode.util;
public class CenterStageCommon {
public enum Alliance { Blue, Red }
public enum PropLocation { Unknown, Left, Center, Right }
}

View File

@@ -0,0 +1,23 @@
package org.firstinspires.ftc.teamcode.util;
import android.graphics.Color;
import org.opencv.core.Scalar;
public class Colors {
// CV Color Threshold Constants
public static Scalar FTC_RED_LOWER = new Scalar(165, 80, 80);
public static Scalar FTC_RED_UPPER = new Scalar(15, 255, 255);
public static ScalarRange FTC_RED_RANGE_1 = new ScalarRange(new Scalar(180, FTC_RED_UPPER.val[1], FTC_RED_UPPER.val[2]), FTC_RED_LOWER);
public static ScalarRange FTC_RED_RANGE_2 = new ScalarRange(FTC_RED_UPPER, new Scalar(0, FTC_RED_LOWER.val[1], FTC_RED_LOWER.val[2]));
public static Scalar FTC_BLUE_LOWER = new Scalar(75, 40, 80);
public static Scalar FTC_BLUE_UPPER = new Scalar(120, 255, 255);
public static ScalarRange FTC_BLUE_RANGE = new ScalarRange(FTC_BLUE_UPPER, FTC_BLUE_LOWER);
public static Scalar FTC_WHITE_LOWER = new Scalar(0, 0, 40);
public static Scalar FTC_WHITE_UPPER = new Scalar(180, 30, 255);
public static OpenCVUtil.LinePaint RED = new OpenCVUtil.LinePaint(Color.RED);
public static OpenCVUtil.LinePaint BLUE = new OpenCVUtil.LinePaint(Color.BLUE);
public static OpenCVUtil.LinePaint BLACK = new OpenCVUtil.LinePaint(Color.BLACK);
public static OpenCVUtil.LinePaint WHITE = new OpenCVUtil.LinePaint(Color.WHITE);
}
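Red straddles the HSV hue wrap-around (hue 180 wraps back to 0), which is why it gets two ScalarRanges where blue needs one. A sketch of how the two bands combine into a single mask, mirroring what PropDetectionPipeline.detect() does below; the helper class itself is illustrative, not part of the commit:
import static org.firstinspires.ftc.teamcode.util.Colors.FTC_RED_RANGE_1;
import static org.firstinspires.ftc.teamcode.util.Colors.FTC_RED_RANGE_2;

import org.opencv.core.Core;
import org.opencv.core.Mat;

// Hypothetical helper: OR the two red hue bands into one binary mask.
public class RedMaskSketch {
    public static Mat redMask(Mat hsv) {
        Mat high = new Mat(); // hue 165..180
        Mat low = new Mat();  // hue 0..15
        Core.inRange(hsv, FTC_RED_RANGE_1.getLower(), FTC_RED_RANGE_1.getUpper(), high);
        Core.inRange(hsv, FTC_RED_RANGE_2.getLower(), FTC_RED_RANGE_2.getUpper(), low);
        Core.add(high, low, high); // union of both bands
        return high;
    }
}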

View File

@@ -78,4 +78,12 @@ public class Configurables {
public static double CV_GOAL_SIDE_ALLOWABLE_SIZE_ERROR = 100;
public static Size CV_GOAL_SIDE_ASPECT_RATIO = new Size(6.5,15.5);
public static double CV_GOAL_SIDE_ALLOWABLE_ASPECT_ERROR = 10;
public static double CAMERA_OFFSET_X = 0f;
public static double DETECTION_AREA_MIN = 0.02f;
public static double DETECTION_AREA_MAX = 0.3f;
public static double DETECTION_LEFT_X = 0;
public static double DETECTION_CENTER_X = .5;
public static double DETECTION_RIGHT_X = 1;
public static double SCORING_DISTANCE_FROM_APRIL_TAG = 6f;
}

View File

@@ -43,6 +43,7 @@ public class Constants {
public static final String WHEEL_FRONT_RIGHT = "frontRight";
public static final String WHEEL_BACK_LEFT = "backLeft";
public static final String WHEEL_BACK_RIGHT = "backRight";
public static final String WEBCAM_NAME = "Targeting Webcam";
public static final String ARM = "wobbler";
public static final String CLAW = "claw";
public static final String INTAKE = "intake";

View File

@@ -1,5 +1,7 @@
package org.firstinspires.ftc.teamcode.util;
import android.graphics.Paint;
import org.opencv.core.Mat;
import org.opencv.core.MatOfInt;
import org.opencv.core.MatOfPoint;
@@ -11,24 +13,6 @@ import org.opencv.imgproc.Moments;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
import static org.firstinspires.ftc.teamcode.util.Configurables.CAMERA_BLUE_GOAL_LOWER;
import static org.firstinspires.ftc.teamcode.util.Configurables.CAMERA_BLUE_GOAL_UPPER;
import static org.firstinspires.ftc.teamcode.util.Configurables.CAMERA_RED_GOAL_LOWER;
import static org.firstinspires.ftc.teamcode.util.Configurables.CAMERA_RED_GOAL_UPPER;
import static org.firstinspires.ftc.teamcode.util.Configurables.CV_GOAL_ALLOWABLE_AREA_ERROR;
import static org.firstinspires.ftc.teamcode.util.Configurables.CV_GOAL_SIDE_ALLOWABLE_ASPECT_ERROR;
import static org.firstinspires.ftc.teamcode.util.Configurables.CV_GOAL_SIDE_ALLOWABLE_SIZE_ERROR;
import static org.firstinspires.ftc.teamcode.util.Configurables.CV_GOAL_ALLOWABLE_SOLIDARITY_ERROR;
import static org.firstinspires.ftc.teamcode.util.Configurables.CV_GOAL_SIDE_ALLOWABLE_Y_ERROR;
import static org.firstinspires.ftc.teamcode.util.Configurables.CV_GOAL_SIDE_ASPECT_RATIO;
import static org.firstinspires.ftc.teamcode.util.Configurables.CV_GOAL_MIN_CONFIDENCE;
import static org.firstinspires.ftc.teamcode.util.Configurables.CV_GOAL_PROPER_AREA;
import static org.firstinspires.ftc.teamcode.util.Configurables.CV_GOAL_PROPER_ASPECT;
import static org.firstinspires.ftc.teamcode.util.Configurables.CV_GOAL_PROPER_HEIGHT;
import static org.firstinspires.ftc.teamcode.util.Configurables.CV_MAX_GOAL_AREA;
import static org.firstinspires.ftc.teamcode.util.Configurables.CV_MIN_GOAL_AREA;
// CV Helper Functions
public class OpenCVUtil {
@@ -106,48 +90,13 @@ public class OpenCVUtil {
return contours.subList(0, Math.min(numContours, contours.size()));
}
public static MatOfPoint getHighGoalContour(List<MatOfPoint> contours) {
Collections.sort(contours, (a, b) -> (int) Imgproc.contourArea(b) - (int) Imgproc.contourArea(a));
// return null if nothing
if (contours.size() == 0) {
return null;
}
// check each contour for touching the top, aspect, and size
double properAspect = ((double) CV_GOAL_SIDE_ASPECT_RATIO.height) / ((double) CV_GOAL_SIDE_ASPECT_RATIO.width);
for (int i = 0; i < contours.size() - 1; i++) {
MatOfPoint contour = contours.get(i);
Rect rect = Imgproc.boundingRect(contour);
double area = Imgproc.contourArea(contour);
double aspect = ((double) rect.height) / ((double) rect.width);
if (rect.y <= -100 || Math.abs(properAspect - aspect) > CV_GOAL_SIDE_ALLOWABLE_ASPECT_ERROR ||
area < CV_MIN_GOAL_AREA || area > CV_MAX_GOAL_AREA) {
contours.remove(i);
i--;
}
}
// check for 2 that can be combined
int goalCounter = -1;
for (int i = 0; i < contours.size() - 1; i++) {
MatOfPoint contour1 = contours.get(i);
MatOfPoint contour2 = contours.get(i + 1);
Rect rect1 = Imgproc.boundingRect(contour1);
Rect rect2 = Imgproc.boundingRect(contour2);
double area1 = Imgproc.contourArea(contour1);
double area2 = Imgproc.contourArea(contour2);
if (Math.abs(Math.abs(rect1.y) - Math.abs(rect2.y)) < CV_GOAL_SIDE_ALLOWABLE_Y_ERROR &&
Math.abs(area1 - area2) < CV_GOAL_SIDE_ALLOWABLE_SIZE_ERROR) {
goalCounter = i;
break;
}
}
// return the results
if (goalCounter == -1) {
return contours.get(0);
} else {
MatOfPoint highGoal = new MatOfPoint();
highGoal.push_back(contours.get(goalCounter));
highGoal.push_back(contours.get(goalCounter + 1));
return highGoal;
public static class LinePaint extends Paint
{
public LinePaint(int color)
{
setColor(color);
setAntiAlias(true);
setStrokeCap(Paint.Cap.ROUND);
}
}
}
}

View File

@@ -0,0 +1,21 @@
package org.firstinspires.ftc.teamcode.util;
import org.opencv.core.Scalar;
public class ScalarRange {
private Scalar upper;
private Scalar lower;
public ScalarRange(Scalar upper, Scalar lower) {
this.upper = upper;
this.lower = lower;
}
public Scalar getUpper() {
return this.upper;
}
public Scalar getLower() {
return this.lower;
}
}

View File

@@ -1,12 +1,5 @@
package org.firstinspires.ftc.teamcode.vision;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import static org.firstinspires.ftc.teamcode.util.Constants.GREEN;
import static org.firstinspires.ftc.teamcode.util.Constants.INVALID_AREA;
import static org.firstinspires.ftc.teamcode.util.Constants.INVALID_POINT;
@@ -17,6 +10,13 @@ import static org.firstinspires.ftc.teamcode.util.OpenCVUtil.getBottomLeftOfCont
import static org.firstinspires.ftc.teamcode.util.OpenCVUtil.getBottomRightOfContour;
import static org.firstinspires.ftc.teamcode.util.OpenCVUtil.getCenterOfContour;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
// Class for a Detection
public class Detection {
private double minAreaPx;
@@ -35,10 +35,10 @@ public class Detection {
this.maxAreaPx = frameSize.area();
}
public Detection(Size frameSize, double minAreaFactor, double maxSizeFactor) {
public Detection(Size frameSize, double minAreaFactor, double maxAreaFactor) {
this.maxSizePx = frameSize;
this.minAreaPx = frameSize.area() * minAreaFactor;
this.maxAreaPx = frameSize.area() * maxSizeFactor;
this.maxAreaPx = frameSize.area() * maxAreaFactor;
}
public void setMinArea(double minAreaFactor) {
@@ -54,8 +54,6 @@ public class Detection {
if (isValid()) {
drawConvexHull(img, contour, color);
drawPoint(img, centerPx, GREEN);
// drawPoint(img, bottomLeftPx, GREEN);
// drawPoint(img, bottomRightPx, GREEN);
}
}
@@ -64,15 +62,12 @@ public class Detection {
if (isValid()) {
fillConvexHull(img, contour, color);
drawPoint(img, centerPx, GREEN);
// drawPoint(img, bottomLeftPx, GREEN);
// drawPoint(img, bottomRightPx, GREEN);
}
}
// Check if the current Detection is valid
public boolean isValid() {
// return true;
return (this.contour != null) && (this.centerPx != INVALID_POINT) && (this.areaPx != INVALID_AREA);
return (this.contour != null) && (this.areaPx != INVALID_AREA);
}
// Get the current contour
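For scale, the factor-based constructor turns the new DETECTION_AREA_MIN/MAX values from Configurables into pixel bounds; the 640x480 stream size below is an assumption used only to illustrate the arithmetic:
// Hypothetical numbers, assuming a 640x480 stream (not part of this commit).
public class AreaFactorExample {
    public static void main(String[] args) {
        double frameArea = 640.0 * 480.0;    // 307,200 px
        double minAreaPx = frameArea * 0.02; // DETECTION_AREA_MIN -> 6,144 px
        double maxAreaPx = frameArea * 0.3;  // DETECTION_AREA_MAX -> 92,160 px
        // These are exactly what Detection's constructor stores as minAreaPx and maxAreaPx.
        System.out.println(minAreaPx + " .. " + maxAreaPx);
    }
}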

View File

@@ -0,0 +1,140 @@
package org.firstinspires.ftc.teamcode.vision;
import static org.firstinspires.ftc.teamcode.hardware.Camera.PROP_REJECTION_VERTICAL_LOWER;
import static org.firstinspires.ftc.teamcode.hardware.Camera.PROP_REJECTION_VERTICAL_UPPER;
import static org.firstinspires.ftc.teamcode.util.Colors.FTC_BLUE_RANGE;
import static org.firstinspires.ftc.teamcode.util.Colors.FTC_RED_RANGE_1;
import static org.firstinspires.ftc.teamcode.util.Colors.FTC_RED_RANGE_2;
import static org.firstinspires.ftc.teamcode.util.Colors.RED;
import static org.firstinspires.ftc.teamcode.util.Colors.WHITE;
import static org.firstinspires.ftc.teamcode.util.Configurables.DETECTION_AREA_MAX;
import static org.firstinspires.ftc.teamcode.util.Configurables.DETECTION_AREA_MIN;
import static org.firstinspires.ftc.teamcode.util.Constants.ANCHOR;
import static org.firstinspires.ftc.teamcode.util.Constants.BLACK;
import static org.firstinspires.ftc.teamcode.util.Constants.BLUR_SIZE;
import static org.firstinspires.ftc.teamcode.util.Constants.ERODE_DILATE_ITERATIONS;
import static org.firstinspires.ftc.teamcode.util.Constants.STRUCTURING_ELEMENT;
import static org.firstinspires.ftc.teamcode.util.OpenCVUtil.getLargestContour;
import android.graphics.Canvas;
import org.firstinspires.ftc.robotcore.internal.camera.calibration.CameraCalibration;
import org.firstinspires.ftc.teamcode.util.CenterStageCommon;
import org.firstinspires.ftc.teamcode.util.ScalarRange;
import org.firstinspires.ftc.vision.VisionProcessor;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfPoint;
import org.opencv.core.Point;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import java.util.ArrayList;
public class PropDetectionPipeline implements VisionProcessor {
CenterStageCommon.Alliance alliance;
private Mat blurred = new Mat();
private Mat hsv = new Mat();
private Mat mask = new Mat();
private Mat tmpMask = new Mat();
private Detection red;
private Detection blue;
// Init
@Override
public void init(int width, int height, CameraCalibration calibration) {
this.red = new Detection(new Size(width, height), DETECTION_AREA_MIN, DETECTION_AREA_MAX);
this.blue = new Detection(new Size(width, height), DETECTION_AREA_MIN, DETECTION_AREA_MAX);
}
// Process each frame that is received from the webcam
@Override
public Object processFrame(Mat input, long captureTimeNanos) {
Imgproc.GaussianBlur(input, blurred, BLUR_SIZE, 0);
Imgproc.cvtColor(blurred, hsv, Imgproc.COLOR_RGB2HSV);
if (alliance == CenterStageCommon.Alliance.Red) {
red.setContour(detect(FTC_RED_RANGE_1, FTC_RED_RANGE_2));
}
if (alliance == CenterStageCommon.Alliance.Blue) {
blue.setContour(detect(FTC_BLUE_RANGE));
}
return input;
}
private Mat zeros;
private Mat zeros(Size size, int type) {
if (this.zeros == null) {
this.zeros = Mat.zeros(size, type);
}
return this.zeros;
}
private MatOfPoint detect(ScalarRange... colorRanges) {
mask.release();
for (ScalarRange colorRange : colorRanges) {
Core.inRange(hsv, colorRange.getLower(), colorRange.getUpper(), tmpMask);
if (mask.empty() || mask.rows() <= 0) {
Core.inRange(hsv, colorRange.getLower(), colorRange.getUpper(), mask);
}
Core.add(mask, tmpMask, mask);
}
Imgproc.rectangle(mask, new Point(0,0), new Point(mask.cols() - 1, (int)PROP_REJECTION_VERTICAL_UPPER), BLACK, -1);
Imgproc.rectangle(mask, new Point(0,(int)PROP_REJECTION_VERTICAL_LOWER), new Point(mask.cols() - 1, mask.rows() -1), BLACK, -1);
Imgproc.erode(mask, mask, STRUCTURING_ELEMENT, ANCHOR, ERODE_DILATE_ITERATIONS);
Imgproc.dilate(mask, mask, STRUCTURING_ELEMENT, ANCHOR, ERODE_DILATE_ITERATIONS);
ArrayList<MatOfPoint> contours = new ArrayList<>();
Imgproc.findContours(mask, contours, new Mat(), Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);
return getLargestContour(contours);
}
@Override
public void onDrawFrame(Canvas canvas, int onscreenWidth, int onscreenHeight, float scaleBmpPxToCanvasPx, float scaleCanvasDensity, Object userContext) {
canvas.drawLine(0, PROP_REJECTION_VERTICAL_LOWER, canvas.getWidth(), PROP_REJECTION_VERTICAL_LOWER, WHITE);
canvas.drawLine(0, PROP_REJECTION_VERTICAL_UPPER, canvas.getWidth(), PROP_REJECTION_VERTICAL_UPPER, WHITE);
if (red != null && red.isValid()) {
Point center = red.getCenterPx();
if (center.y < PROP_REJECTION_VERTICAL_LOWER
&& center.y > PROP_REJECTION_VERTICAL_UPPER) {
canvas.drawCircle((float)red.getCenterPx().x, (float)red.getCenterPx().y, 10, WHITE);
} else {
canvas.drawCircle((float)red.getCenterPx().x, (float)red.getCenterPx().y, 10, RED);
}
}
if (blue != null && blue.isValid()) {
Point center = blue.getCenterPx();
if (center.y < PROP_REJECTION_VERTICAL_LOWER
&& center.y > PROP_REJECTION_VERTICAL_UPPER) {
canvas.drawCircle((float)blue.getCenterPx().x, (float)blue.getCenterPx().y, 10, WHITE);
} else {
canvas.drawCircle((float)blue.getCenterPx().x, (float)blue.getCenterPx().y, 10, RED);
}
}
}
public Detection getRed() {
return this.red;
}
public Detection getBlue() {
return this.blue;
}
public void setAlliance(CenterStageCommon.Alliance alliance) {
this.alliance = alliance;
}
public CenterStageCommon.Alliance getAlliance() {
return this.alliance;
}
}
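Camera.init() registers this processor with VisionPortal.easyCreateWithDefaults. If the stream resolution or processor set ever needs to be controlled explicitly, a builder-based variant might look like the sketch below; the 640x480 resolution is an assumption, and CameraPortalSketch is illustrative rather than part of the commit:
package org.firstinspires.ftc.teamcode.hardware;

import static org.firstinspires.ftc.teamcode.util.Constants.WEBCAM_NAME;

import android.util.Size;
import com.qualcomm.robotcore.hardware.HardwareMap;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.teamcode.vision.PropDetectionPipeline;
import org.firstinspires.ftc.vision.VisionPortal;
import org.firstinspires.ftc.vision.apriltag.AprilTagProcessor;

// Hypothetical alternative to easyCreateWithDefaults (not part of this commit).
public class CameraPortalSketch {
    public static VisionPortal build(HardwareMap hardwareMap,
                                     AprilTagProcessor aprilTag,
                                     PropDetectionPipeline prop) {
        return new VisionPortal.Builder()
                .setCamera(hardwareMap.get(WebcamName.class, WEBCAM_NAME))
                .setCameraResolution(new Size(640, 480)) // assumed resolution
                .addProcessors(aprilTag, prop)
                .build();
    }
}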