Fixed broken code caused by an auto-upload merge error. Added a variable to keep track of run time. Fixed some macros and commented most of the code so future drivers can easily learn the controls and what the code does. I will be trying to make the macros time-based, but I am committing and pushing this right now because I know it will go horribly wrong.

This commit is contained in:
UntitledRice 2023-11-14 11:57:15 -06:00
parent dc48b5ebf3
commit 76d8b67c01
15 changed files with 508 additions and 1313 deletions
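
The new runTime variable and the macro groundwork in KhangMain point toward the time-based macros mentioned above. Below is a minimal sketch of that idea using only FTC SDK classes (OpMode, ElapsedTime, Servo); the OpMode name, the "Arm Servo" configuration name, the servo positions, and the 1.5-second delay are illustrative assumptions, not code from this commit.

package org.firstinspires.ftc.teamcode;

import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.Servo;
import com.qualcomm.robotcore.util.ElapsedTime;

// Illustrative sketch of a time-based macro; the hardware name and timings are placeholders.
@TeleOp(name = "Timed Macro Sketch", group = "Testing")
public class TimedMacroSketch extends OpMode {
    private final ElapsedTime runTime = new ElapsedTime();
    private Servo armServo;
    private double macroStartTime = 0;    // fields persist between loop() calls
    private boolean macroRunning = false;

    @Override
    public void init() {
        runTime.reset();
        armServo = hardwareMap.get(Servo.class, "Arm Servo");
    }

    @Override
    public void loop() {
        double currentTime = runTime.seconds();

        // Press Y to start the macro and remember when it began.
        if (gamepad2.y && !macroRunning) {
            macroRunning = true;
            macroStartTime = currentTime;
            armServo.setPosition(0.8);    // first step of the macro
        }

        // 1.5 seconds after the start, run the second step and finish.
        if (macroRunning && currentTime - macroStartTime > 1.5) {
            armServo.setPosition(0.2);
            macroRunning = false;
        }

        telemetry.addData("Runtime", currentTime);
        telemetry.update();
    }
}

The key point is that macroStartTime and macroRunning are fields rather than locals, so they survive between loop() calls and the elapsed-time check can gate each step of the macro.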

View File

@@ -1,189 +1,189 @@
/* Copyright (c) 2023 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.robotcontroller.external.samples;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import org.firstinspires.ftc.robotcore.external.hardware.camera.BuiltinCameraDirection;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.vision.VisionPortal;
import org.firstinspires.ftc.vision.apriltag.AprilTagDetection;
import org.firstinspires.ftc.vision.apriltag.AprilTagProcessor;
import java.util.List;
/*
* This OpMode illustrates the basics of AprilTag recognition and pose estimation,
* including Java Builder structures for specifying Vision parameters.
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
*/
@TeleOp(name = "Concept: AprilTag", group = "Concept")
@Disabled
public class ConceptAprilTag extends LinearOpMode {
private static final boolean USE_WEBCAM = true; // true for webcam, false for phone camera
/**
* The variable to store our instance of the AprilTag processor.
*/
private AprilTagProcessor aprilTag;
/**
* The variable to store our instance of the vision portal.
*/
private VisionPortal visionPortal;
@Override
public void runOpMode() {
initAprilTag();
// Wait for the DS start button to be touched.
telemetry.addData("DS preview on/off", "3 dots, Camera Stream");
telemetry.addData(">", "Touch Play to start OpMode");
telemetry.update();
waitForStart();
if (opModeIsActive()) {
while (opModeIsActive()) {
telemetryAprilTag();
// Push telemetry to the Driver Station.
telemetry.update();
// Save CPU resources; can resume streaming when needed.
if (gamepad1.dpad_down) {
visionPortal.stopStreaming();
} else if (gamepad1.dpad_up) {
visionPortal.resumeStreaming();
}
// Share the CPU.
sleep(20);
}
}
// Save more CPU resources when camera is no longer needed.
visionPortal.close();
} // end method runOpMode()
/**
* Initialize the AprilTag processor.
*/
private void initAprilTag() {
// Create the AprilTag processor.
aprilTag = new AprilTagProcessor.Builder()
//.setDrawAxes(false)
//.setDrawCubeProjection(false)
//.setDrawTagOutline(true)
//.setTagFamily(AprilTagProcessor.TagFamily.TAG_36h11)
//.setTagLibrary(AprilTagGameDatabase.getCenterStageTagLibrary())
//.setOutputUnits(DistanceUnit.INCH, AngleUnit.DEGREES)
// == CAMERA CALIBRATION ==
// If you do not manually specify calibration parameters, the SDK will attempt
// to load a predefined calibration for your camera.
//.setLensIntrinsics(578.272, 578.272, 402.145, 221.506)
// ... these parameters are fx, fy, cx, cy.
.build();
// Create the vision portal by using a builder.
VisionPortal.Builder builder = new VisionPortal.Builder();
// Set the camera (webcam vs. built-in RC phone camera).
if (USE_WEBCAM) {
builder.setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"));
} else {
builder.setCamera(BuiltinCameraDirection.BACK);
}
// Choose a camera resolution. Not all cameras support all resolutions.
//builder.setCameraResolution(new Size(640, 480));
// Enable the RC preview (LiveView). Set "false" to omit camera monitoring.
//builder.enableCameraMonitoring(true);
// Set the stream format; MJPEG uses less bandwidth than default YUY2.
//builder.setStreamFormat(VisionPortal.StreamFormat.YUY2);
// Choose whether or not LiveView stops if no processors are enabled.
// If set "true", monitor shows solid orange screen if no processors enabled.
// If set "false", monitor shows camera view without annotations.
//builder.setAutoStopLiveView(false);
// Set and enable the processor.
builder.addProcessor(aprilTag);
// Build the Vision Portal, using the above settings.
visionPortal = builder.build();
// Disable or re-enable the aprilTag processor at any time.
//visionPortal.setProcessorEnabled(aprilTag, true);
} // end method initAprilTag()
/**
* Add telemetry about AprilTag detections.
*/
private void telemetryAprilTag() {
List<AprilTagDetection> currentDetections = aprilTag.getDetections();
telemetry.addData("# AprilTags Detected", currentDetections.size());
// Step through the list of detections and display info for each one.
for (AprilTagDetection detection : currentDetections) {
if (detection.metadata != null) {
telemetry.addLine(String.format("\n==== (ID %d) %s", detection.id, detection.metadata.name));
telemetry.addLine(String.format("XYZ %6.1f %6.1f %6.1f (inch)", detection.ftcPose.x, detection.ftcPose.y, detection.ftcPose.z));
telemetry.addLine(String.format("PRY %6.1f %6.1f %6.1f (deg)", detection.ftcPose.pitch, detection.ftcPose.roll, detection.ftcPose.yaw));
telemetry.addLine(String.format("RBE %6.1f %6.1f %6.1f (inch, deg, deg)", detection.ftcPose.range, detection.ftcPose.bearing, detection.ftcPose.elevation));
} else {
telemetry.addLine(String.format("\n==== (ID %d) Unknown", detection.id));
telemetry.addLine(String.format("Center %6.0f %6.0f (pixels)", detection.center.x, detection.center.y));
}
} // end for() loop
// Add "key" information to telemetry
telemetry.addLine("\nkey:\nXYZ = X (Right), Y (Forward), Z (Up) dist.");
telemetry.addLine("PRY = Pitch, Roll & Yaw (XYZ Rotation)");
telemetry.addLine("RBE = Range, Bearing & Elevation");
} // end method telemetryAprilTag()
} // end class
///* Copyright (c) 2023 FIRST. All rights reserved.
// *
// * Redistribution and use in source and binary forms, with or without modification,
// * are permitted (subject to the limitations in the disclaimer below) provided that
// * the following conditions are met:
// *
// * Redistributions of source code must retain the above copyright notice, this list
// * of conditions and the following disclaimer.
// *
// * Redistributions in binary form must reproduce the above copyright notice, this
// * list of conditions and the following disclaimer in the documentation and/or
// * other materials provided with the distribution.
// *
// * Neither the name of FIRST nor the names of its contributors may be used to endorse or
// * promote products derived from this software without specific prior written permission.
// *
// * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
// * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
// * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
// * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
// * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// */
//
//package org.firstinspires.ftc.robotcontroller.external.samples;
//
//import com.qualcomm.robotcore.eventloop.opmode.Disabled;
//import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
//import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
//import org.firstinspires.ftc.robotcore.external.hardware.camera.BuiltinCameraDirection;
//import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
//import org.firstinspires.ftc.vision.VisionPortal;
//import org.firstinspires.ftc.vision.apriltag.AprilTagDetection;
//import org.firstinspires.ftc.vision.apriltag.AprilTagProcessor;
//
//import java.util.List;
//
///*
// * This OpMode illustrates the basics of AprilTag recognition and pose estimation,
// * including Java Builder structures for specifying Vision parameters.
// *
// * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
// * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
// */
//@TeleOp(name = "Concept: AprilTag", group = "Concept")
//@Disabled
//public class ConceptAprilTag extends LinearOpMode {
//
// private static final boolean USE_WEBCAM = true; // true for webcam, false for phone camera
//
// /**
// * The variable to store our instance of the AprilTag processor.
// */
// private AprilTagProcessor aprilTag;
//
// /**
// * The variable to store our instance of the vision portal.
// */
// private VisionPortal visionPortal;
//
// @Override
// public void runOpMode() {
//
// initAprilTag();
//
// // Wait for the DS start button to be touched.
// telemetry.addData("DS preview on/off", "3 dots, Camera Stream");
// telemetry.addData(">", "Touch Play to start OpMode");
// telemetry.update();
// waitForStart();
//
// if (opModeIsActive()) {
// while (opModeIsActive()) {
//
// telemetryAprilTag();
//
// // Push telemetry to the Driver Station.
// telemetry.update();
//
// // Save CPU resources; can resume streaming when needed.
// if (gamepad1.dpad_down) {
// visionPortal.stopStreaming();
// } else if (gamepad1.dpad_up) {
// visionPortal.resumeStreaming();
// }
//
// // Share the CPU.
// sleep(20);
// }
// }
//
// // Save more CPU resources when camera is no longer needed.
// visionPortal.close();
//
// } // end method runOpMode()
//
// /**
// * Initialize the AprilTag processor.
// */
// private void initAprilTag() {
//
// // Create the AprilTag processor.
// aprilTag = new AprilTagProcessor.Builder()
// //.setDrawAxes(false)
// //.setDrawCubeProjection(false)
// //.setDrawTagOutline(true)
// //.setTagFamily(AprilTagProcessor.TagFamily.TAG_36h11)
// //.setTagLibrary(AprilTagGameDatabase.getCenterStageTagLibrary())
// //.setOutputUnits(DistanceUnit.INCH, AngleUnit.DEGREES)
//
// // == CAMERA CALIBRATION ==
// // If you do not manually specify calibration parameters, the SDK will attempt
// // to load a predefined calibration for your camera.
// //.setLensIntrinsics(578.272, 578.272, 402.145, 221.506)
//
// // ... these parameters are fx, fy, cx, cy.
//
// .build();
//
// // Create the vision portal by using a builder.
// VisionPortal.Builder builder = new VisionPortal.Builder();
//
// // Set the camera (webcam vs. built-in RC phone camera).
// if (USE_WEBCAM) {
// builder.setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"));
// } else {
// builder.setCamera(BuiltinCameraDirection.BACK);
// }
//
// // Choose a camera resolution. Not all cameras support all resolutions.
// //builder.setCameraResolution(new Size(640, 480));
//
// // Enable the RC preview (LiveView). Set "false" to omit camera monitoring.
// //builder.enableCameraMonitoring(true);
//
// // Set the stream format; MJPEG uses less bandwidth than default YUY2.
// //builder.setStreamFormat(VisionPortal.StreamFormat.YUY2);
//
// // Choose whether or not LiveView stops if no processors are enabled.
// // If set "true", monitor shows solid orange screen if no processors enabled.
// // If set "false", monitor shows camera view without annotations.
// //builder.setAutoStopLiveView(false);
//
// // Set and enable the processor.
// builder.addProcessor(aprilTag);
//
// // Build the Vision Portal, using the above settings.
// visionPortal = builder.build();
//
// // Disable or re-enable the aprilTag processor at any time.
// //visionPortal.setProcessorEnabled(aprilTag, true);
//
// } // end method initAprilTag()
//
//
// /**
// * Add telemetry about AprilTag detections.
// */
// private void telemetryAprilTag() {
//
// List<AprilTagDetection> currentDetections = aprilTag.getDetections();
// telemetry.addData("# AprilTags Detected", currentDetections.size());
//
// // Step through the list of detections and display info for each one.
// for (AprilTagDetection detection : currentDetections) {
// if (detection.metadata != null) {
// telemetry.addLine(String.format("\n==== (ID %d) %s", detection.id, detection.metadata.name));
// telemetry.addLine(String.format("XYZ %6.1f %6.1f %6.1f (inch)", detection.ftcPose.x, detection.ftcPose.y, detection.ftcPose.z));
// telemetry.addLine(String.format("PRY %6.1f %6.1f %6.1f (deg)", detection.ftcPose.pitch, detection.ftcPose.roll, detection.ftcPose.yaw));
// telemetry.addLine(String.format("RBE %6.1f %6.1f %6.1f (inch, deg, deg)", detection.ftcPose.range, detection.ftcPose.bearing, detection.ftcPose.elevation));
// } else {
// telemetry.addLine(String.format("\n==== (ID %d) Unknown", detection.id));
// telemetry.addLine(String.format("Center %6.0f %6.0f (pixels)", detection.center.x, detection.center.y));
// }
// } // end for() loop
//
// // Add "key" information to telemetry
// telemetry.addLine("\nkey:\nXYZ = X (Right), Y (Forward), Z (Up) dist.");
// telemetry.addLine("PRY = Pitch, Roll & Yaw (XYZ Rotation)");
// telemetry.addLine("RBE = Range, Bearing & Elevation");
//
// } // end method telemetryAprilTag()
//
//} // end class

View File

@@ -1,149 +1,149 @@
/* Copyright (c) 2023 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.robotcontroller.external.samples;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import org.firstinspires.ftc.robotcore.external.hardware.camera.BuiltinCameraDirection;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.vision.VisionPortal;
import org.firstinspires.ftc.vision.apriltag.AprilTagDetection;
import org.firstinspires.ftc.vision.apriltag.AprilTagProcessor;
import java.util.List;
/*
* This OpMode illustrates the basics of AprilTag recognition and pose estimation, using
* the easy way.
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
*/
@TeleOp(name = "Concept: AprilTag Easy", group = "Concept")
@Disabled
public class ConceptAprilTagEasy extends LinearOpMode {
private static final boolean USE_WEBCAM = true; // true for webcam, false for phone camera
/**
* The variable to store our instance of the AprilTag processor.
*/
private AprilTagProcessor aprilTag;
/**
* The variable to store our instance of the vision portal.
*/
private VisionPortal visionPortal;
@Override
public void runOpMode() {
initAprilTag();
// Wait for the DS start button to be touched.
telemetry.addData("DS preview on/off", "3 dots, Camera Stream");
telemetry.addData(">", "Touch Play to start OpMode");
telemetry.update();
waitForStart();
if (opModeIsActive()) {
while (opModeIsActive()) {
telemetryAprilTag();
// Push telemetry to the Driver Station.
telemetry.update();
// Save CPU resources; can resume streaming when needed.
if (gamepad1.dpad_down) {
visionPortal.stopStreaming();
} else if (gamepad1.dpad_up) {
visionPortal.resumeStreaming();
}
// Share the CPU.
sleep(20);
}
}
// Save more CPU resources when camera is no longer needed.
visionPortal.close();
} // end method runOpMode()
/**
* Initialize the AprilTag processor.
*/
private void initAprilTag() {
// Create the AprilTag processor the easy way.
aprilTag = AprilTagProcessor.easyCreateWithDefaults();
// Create the vision portal the easy way.
if (USE_WEBCAM) {
visionPortal = VisionPortal.easyCreateWithDefaults(
hardwareMap.get(WebcamName.class, "Webcam 1"), aprilTag);
} else {
visionPortal = VisionPortal.easyCreateWithDefaults(
BuiltinCameraDirection.BACK, aprilTag);
}
} // end method initAprilTag()
/**
* Add telemetry about AprilTag detections.
*/
private void telemetryAprilTag() {
List<AprilTagDetection> currentDetections = aprilTag.getDetections();
telemetry.addData("# AprilTags Detected", currentDetections.size());
// Step through the list of detections and display info for each one.
for (AprilTagDetection detection : currentDetections) {
if (detection.metadata != null) {
telemetry.addLine(String.format("\n==== (ID %d) %s", detection.id, detection.metadata.name));
telemetry.addLine(String.format("XYZ %6.1f %6.1f %6.1f (inch)", detection.ftcPose.x, detection.ftcPose.y, detection.ftcPose.z));
telemetry.addLine(String.format("PRY %6.1f %6.1f %6.1f (deg)", detection.ftcPose.pitch, detection.ftcPose.roll, detection.ftcPose.yaw));
telemetry.addLine(String.format("RBE %6.1f %6.1f %6.1f (inch, deg, deg)", detection.ftcPose.range, detection.ftcPose.bearing, detection.ftcPose.elevation));
} else {
telemetry.addLine(String.format("\n==== (ID %d) Unknown", detection.id));
telemetry.addLine(String.format("Center %6.0f %6.0f (pixels)", detection.center.x, detection.center.y));
}
} // end for() loop
// Add "key" information to telemetry
telemetry.addLine("\nkey:\nXYZ = X (Right), Y (Forward), Z (Up) dist.");
telemetry.addLine("PRY = Pitch, Roll & Yaw (XYZ Rotation)");
telemetry.addLine("RBE = Range, Bearing & Elevation");
} // end method telemetryAprilTag()
} // end class
///* Copyright (c) 2023 FIRST. All rights reserved.
// *
// * Redistribution and use in source and binary forms, with or without modification,
// * are permitted (subject to the limitations in the disclaimer below) provided that
// * the following conditions are met:
// *
// * Redistributions of source code must retain the above copyright notice, this list
// * of conditions and the following disclaimer.
// *
// * Redistributions in binary form must reproduce the above copyright notice, this
// * list of conditions and the following disclaimer in the documentation and/or
// * other materials provided with the distribution.
// *
// * Neither the name of FIRST nor the names of its contributors may be used to endorse or
// * promote products derived from this software without specific prior written permission.
// *
// * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
// * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
// * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
// * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
// * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// */
//
//package org.firstinspires.ftc.robotcontroller.external.samples;
//
//import com.qualcomm.robotcore.eventloop.opmode.Disabled;
//import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
//import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
//import org.firstinspires.ftc.robotcore.external.hardware.camera.BuiltinCameraDirection;
//import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
//import org.firstinspires.ftc.vision.VisionPortal;
//import org.firstinspires.ftc.vision.apriltag.AprilTagDetection;
//import org.firstinspires.ftc.vision.apriltag.AprilTagProcessor;
//
//import java.util.List;
//
///*
// * This OpMode illustrates the basics of AprilTag recognition and pose estimation, using
// * the easy way.
// *
// * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
// * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
// */
//@TeleOp(name = "Concept: AprilTag Easy", group = "Concept")
//@Disabled
//public class ConceptAprilTagEasy extends LinearOpMode {
//
// private static final boolean USE_WEBCAM = true; // true for webcam, false for phone camera
//
// /**
// * The variable to store our instance of the AprilTag processor.
// */
// private AprilTagProcessor aprilTag;
//
// /**
// * The variable to store our instance of the vision portal.
// */
// private VisionPortal visionPortal;
//
// @Override
// public void runOpMode() {
//
// initAprilTag();
//
// // Wait for the DS start button to be touched.
// telemetry.addData("DS preview on/off", "3 dots, Camera Stream");
// telemetry.addData(">", "Touch Play to start OpMode");
// telemetry.update();
// waitForStart();
//
// if (opModeIsActive()) {
// while (opModeIsActive()) {
//
// telemetryAprilTag();
//
// // Push telemetry to the Driver Station.
// telemetry.update();
//
// // Save CPU resources; can resume streaming when needed.
// if (gamepad1.dpad_down) {
// visionPortal.stopStreaming();
// } else if (gamepad1.dpad_up) {
// visionPortal.resumeStreaming();
// }
//
// // Share the CPU.
// sleep(20);
// }
// }
//
// // Save more CPU resources when camera is no longer needed.
// visionPortal.close();
//
// } // end method runOpMode()
//
// /**
// * Initialize the AprilTag processor.
// */
// private void initAprilTag() {
//
// // Create the AprilTag processor the easy way.
// aprilTag = AprilTagProcessor.easyCreateWithDefaults();
//
// // Create the vision portal the easy way.
// if (USE_WEBCAM) {
// visionPortal = VisionPortal.easyCreateWithDefaults(
// hardwareMap.get(WebcamName.class, "Webcam 1"), aprilTag);
// } else {
// visionPortal = VisionPortal.easyCreateWithDefaults(
// BuiltinCameraDirection.BACK, aprilTag);
// }
//
// } // end method initAprilTag()
//
// /**
// * Add telemetry about AprilTag detections.
// */
// private void telemetryAprilTag() {
//
// List<AprilTagDetection> currentDetections = aprilTag.getDetections();
// telemetry.addData("# AprilTags Detected", currentDetections.size());
//
// // Step through the list of detections and display info for each one.
// for (AprilTagDetection detection : currentDetections) {
// if (detection.metadata != null) {
// telemetry.addLine(String.format("\n==== (ID %d) %s", detection.id, detection.metadata.name));
// telemetry.addLine(String.format("XYZ %6.1f %6.1f %6.1f (inch)", detection.ftcPose.x, detection.ftcPose.y, detection.ftcPose.z));
// telemetry.addLine(String.format("PRY %6.1f %6.1f %6.1f (deg)", detection.ftcPose.pitch, detection.ftcPose.roll, detection.ftcPose.yaw));
// telemetry.addLine(String.format("RBE %6.1f %6.1f %6.1f (inch, deg, deg)", detection.ftcPose.range, detection.ftcPose.bearing, detection.ftcPose.elevation));
// } else {
// telemetry.addLine(String.format("\n==== (ID %d) Unknown", detection.id));
// telemetry.addLine(String.format("Center %6.0f %6.0f (pixels)", detection.center.x, detection.center.y));
// }
// } // end for() loop
//
// // Add "key" information to telemetry
// telemetry.addLine("\nkey:\nXYZ = X (Right), Y (Forward), Z (Up) dist.");
// telemetry.addLine("PRY = Pitch, Roll & Yaw (XYZ Rotation)");
// telemetry.addLine("RBE = Range, Bearing & Elevation");
//
// } // end method telemetryAprilTag()
//
//} // end class

View File

@@ -41,7 +41,9 @@ dependencies {
implementation project(':FtcRobotController')
annotationProcessor files('lib/OpModeAnnotationProcessor.jar')
implementation 'org.openftc:easyopencv:1.7.0'
implementation 'com.acmerobotics.roadrunner:core:0.5.5'
implementation 'org.ftclib.ftclib:vision:2.0.1'
implementation 'org.ftclib.ftclib:core:2.0.1'
implementation 'org.apache.commons:commons-math3:3.6.1'
implementation 'com.fasterxml.jackson.core:jackson-databind:2.12.7'
implementation 'com.acmerobotics.roadrunner:core:0.5.6'
}

View File

@@ -52,6 +52,7 @@ public class Arm {
this.armControllerTarget = startingArmPos;
lAServo.setPosition(startingArmPos);
rAServo.setPosition(startingArmPos);
wristServo.setPosition(startingWristPos);
} else if (tape == ArmPos.APosition.SCORE) {
this.armControllerTarget = armScorePos;
lAServo.setPosition(armScorePos);
@@ -69,13 +70,13 @@
}
public void setHopper(HopperPos.hopperPos hopper) {
if (hopper == HopperPos.hopperPos.UP) {
wristServo.setPosition(wristScorePos);
} else if (hopper == HopperPos.hopperPos.DOWN) {
wristServo.setPosition(startingWristPos);
}
}
// public void setHopper(HopperPos.hopperPos hopper) {
// if (hopper == HopperPos.hopperPos.UP) {
// wristServo.setPosition(wristScorePos);
// } else if (hopper == HopperPos.hopperPos.DOWN) {
// wristServo.setPosition(startingWristPos);
// }
// }
public String getTelemetry() {
return "Wrist Servo: "+wristServo.getPosition()+"Left Arm Servo: "+lAServo.getPosition()+"Right Arm Servo: "+rAServo.getPosition()+"Door Servo: "+dServo.getPosition();

View File

@@ -1,101 +0,0 @@
//package org.firstinspires.ftc.teamcode.hardware;
//
//import com.acmerobotics.roadrunner.control.PIDCoefficients;
//import com.acmerobotics.roadrunner.drive.DriveSignal;
//import com.acmerobotics.roadrunner.geometry.Pose2d;
//import com.acmerobotics.roadrunner.trajectory.BaseTrajectoryBuilder;
//import com.acmerobotics.roadrunner.trajectory.TrajectoryBuilder;
//import com.acmerobotics.roadrunner.trajectory.constraints.AngularVelocityConstraint;
//import com.acmerobotics.roadrunner.trajectory.constraints.MecanumVelocityConstraint;
//import com.acmerobotics.roadrunner.trajectory.constraints.MinVelocityConstraint;
//import com.acmerobotics.roadrunner.trajectory.constraints.ProfileAccelerationConstraint;
//import com.acmerobotics.roadrunner.trajectory.constraints.TrajectoryAccelerationConstraint;
//import com.acmerobotics.roadrunner.trajectory.constraints.TrajectoryVelocityConstraint;
//import com.qualcomm.robotcore.hardware.DcMotor;
//import com.qualcomm.robotcore.hardware.DcMotorSimple;
//import com.qualcomm.robotcore.hardware.Gamepad;
//import com.qualcomm.robotcore.hardware.HardwareMap;
//
//import org.firstinspires.ftc.teamcode.roadrunner.drive.DriveConstants;
//import org.firstinspires.ftc.teamcode.roadrunner.trajectorysequence.TrajectorySequenceRunner;
//
//import java.util.Arrays;
//
//public class Drive {
//
// DcMotor frontLeft;
// DcMotor frontRight;
// DcMotor backLeft;
// DcMotor backRight;
// public static PIDCoefficients TRANSLATIONAL_PID = new PIDCoefficients(10, 0, 0);
// public static PIDCoefficients HEADING_PID = new PIDCoefficients(8, 0, 0);
//
// private static final TrajectoryVelocityConstraint VEL_CONSTRAINT = getVelocityConstraint(DriveConstants.MAX_VEL, DriveConstants.MAX_ANG_VEL, DriveConstants.TRACK_WIDTH);
// private static final TrajectoryAccelerationConstraint ACCEL_CONSTRAINT = getAccelerationConstraint(DriveConstants.MAX_ACCEL);
// private TrajectorySequenceRunner trajectorySequenceRunner;
//
// public Drive(HardwareMap hardwareMap) {
// this.init(hardwareMap);
// }
//
// private void init(HardwareMap hardwareMap) {
// this.frontLeft = hardwareMap.get(DcMotor.class, "frontLeft");
// frontLeft.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);
// frontLeft.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
// this.frontRight = hardwareMap.get(DcMotor.class, "frontRight");
// frontRight.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);
// frontRight.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
// this.backLeft = hardwareMap.get(DcMotor.class, "backLeft");
// backLeft.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);
// backLeft.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
// this.backRight = hardwareMap.get(DcMotor.class, "backRight");
// backRight.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);
// backRight.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
// this.frontLeft.setDirection(DcMotorSimple.Direction.FORWARD);
// this.backRight.setDirection(DcMotorSimple.Direction.REVERSE);
// this.frontRight.setDirection(DcMotorSimple.Direction.REVERSE);
// this.backLeft.setDirection(DcMotorSimple.Direction.FORWARD);
// }
//
// public void setInput(Gamepad gamepad1,Gamepad gamepad2) {
// float x = -gamepad1.left_stick_x;
// float y = gamepad1.left_stick_y;
// float z = -gamepad1.right_stick_x;
// float flPower = x + y + z;
// float frPower = -x + y - z;
// float blPower = -x + y + z;
// float brPower = x + y - z;
//
// float max = Math.max(1, Math.max(Math.max(Math.abs(blPower), Math.abs(brPower)), Math.max(Math.abs(flPower),Math.abs(frPower))));
// float scale = 1-((max-1) / max);
// flPower *= scale;
// frPower *= scale;
// blPower *= scale;
// brPower *= scale;
//
// frontLeft.setPower(flPower);
// backLeft.setPower(blPower);
// frontRight.setPower(frPower);
// backRight.setPower(brPower);
// }
//
// public void update() {
// DriveSignal signal = trajectorySequenceRunner.update(getPoseEstimate(), getPoseVelocity());
// if (signal != null) setDriveSignal(signal);
// }
//
// public TrajectoryBuilder trajectoryBuilder(Pose2d startPose) {
// return new TrajectoryBuilder(startPose, VEL_CONSTRAINT, ACCEL_CONSTRAINT);
// }
//
// public static TrajectoryVelocityConstraint getVelocityConstraint(double maxVel, double maxAngularVel, double trackWidth) {
// return new MinVelocityConstraint(Arrays.asList(
// new AngularVelocityConstraint(maxAngularVel),
// new MecanumVelocityConstraint(maxVel, trackWidth)
// ));
// }
//
// public static TrajectoryAccelerationConstraint getAccelerationConstraint(double maxAccel) {
// return new ProfileAccelerationConstraint(maxAccel);
// }
//}

View File

@@ -1,205 +0,0 @@
package org.firstinspires.ftc.teamcode.opmode.autonomous;
import com.acmerobotics.roadrunner.trajectory.Trajectory;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import org.firstinspires.ftc.teamcode.hardware.Robot;
import org.firstinspires.ftc.teamcode.util.CameraPosition;
import java.util.ArrayList;
import java.util.Locale;
public abstract class AbstractAuto extends LinearOpMode {
public Robot robot;
private int teamElementLocation = 2;
private ArrayList<Step> steps;
private double currentRuntime;
public CameraPosition cameraPosition;
// Main method to run all the steps for autonomous
@Override
public void runOpMode() {
// telemetry = new MultipleTelemetry(telemetry, FtcDashboard.getInstance().getTelemetry());
telemetry.addLine("Initializing Robot...");
telemetry.update();
setCameraPosition();
robot = new Robot(hardwareMap);
makeTrajectories();
while (robot.camera.getFrameCount() < 1) {
idle();
}
// wait for start
while (!(isStarted() || isStopRequested())) {
currentRuntime = getRuntime();
robot.update(currentRuntime);
int newLocation = robot.camera.getMarkerId();
if (newLocation != -1) {
teamElementLocation = newLocation;
}
telemetry.addLine("Initialized");
telemetry.addLine("Randomization: "+teamElementLocation);
telemetry.addLine(robot.getTelemetry());
telemetry.update();
}
resetRuntime();
// build the first step
steps = new ArrayList<>();
stopCamera();
initializeSteps(teamElementLocation);
int stepNumber = 0;
double stepTimeout;
Step step = steps.get(stepNumber);
stepTimeout = step.getTimeout() != -1 ? currentRuntime + step.getTimeout() : Double.MAX_VALUE;
step.start();
// run the remaining steps
while (opModeIsActive()) {
currentRuntime = getRuntime();
// once a step finishes
if (step.isFinished() || currentRuntime >= stepTimeout) {
// do the finishing move
step.end();
stepNumber++;
// if it was the last step break out of the while loop
if (stepNumber > steps.size() - 1) {
break;
}
// else continue to the next step
step = steps.get(stepNumber);
stepTimeout = step.getTimeout() != -1 ? currentRuntime + step.getTimeout() : Double.MAX_VALUE;
step.start();
}
// // update turret and slides position
// PoseStorage.slidesPosition = robot.actuators.getSlides();
// PoseStorage.turretPosition = robot.actuators.getTurret();
// PoseStorage.currentPose = robot.drive.getPoseEstimate();
// while the step is running display telemetry
step.whileRunning();
robot.update(currentRuntime);
telemetry.addLine(String.format(Locale.US, "Runtime: %.0f", currentRuntime));
telemetry.addLine("Step " + (stepNumber + 1) + " of " + steps.size() + ", " + step.getTelemetry() + "\n");
telemetry.addLine(robot.getTelemetry());
telemetry.update();
}
}
// Load up all of the steps for the autonomous: to be overridden with the specific steps in the specific auto
public abstract void initializeSteps(int location);
//methods to be implemented in the specific autos
public abstract void setAlliance();
public abstract void setCameraPosition();
public abstract boolean useCamera();
public abstract void makeTrajectories();
// public abstract void setArm(Arm.Position armPos, Claw.Position clawPos);
//helper methods that queue common tasks as autonomous steps
public void turn(double degrees) {
steps.add(new Step("Following a trajectory") {
@Override
public void start() {
robot.drive.turn(degrees);
}
@Override
public void whileRunning() {
robot.drive.update();
}
@Override
public void end() {
}
@Override
public boolean isFinished() {
return !robot.drive.isBusy();
}
});
}
public void followTrajectory(Trajectory trajectory) {
steps.add(new Step("Following a trajectory") {
@Override
public void start() {
robot.drive.followTrajectoryAsync(trajectory);
}
@Override
public void whileRunning() {
robot.drive.update();
}
@Override
public void end() {
}
@Override
public boolean isFinished() {
return !robot.drive.isBusy();
}
});
}
// Functions to add steps
public void addDelay(double timeout) {
steps.add(new Step("Waiting for " + timeout + " seconds", timeout) {
@Override
public void start() {
}
@Override
public void whileRunning() {
}
@Override
public void end() {
}
@Override
public boolean isFinished() {
return false;
}
});
}
public void stopCamera() {
steps.add(new Step("Stopping Signal Camera") {
@Override
public void start() {
robot.camera.stopTargetingCamera();
}
@Override
public void whileRunning() {
}
@Override
public void end() {
}
@Override
public boolean isFinished() {
return true;
}
});
}
}

View File

@@ -1,59 +1,59 @@
package org.firstinspires.ftc.teamcode.opmode.autonomous;
import com.acmerobotics.roadrunner.geometry.Pose2d;
import com.acmerobotics.roadrunner.geometry.Vector2d;
import com.acmerobotics.roadrunner.trajectory.Trajectory;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.hardware.HardwareMap;
import org.firstinspires.ftc.teamcode.hardware.Camera;
import org.firstinspires.ftc.teamcode.hardware.Intake;
import org.firstinspires.ftc.teamcode.hardware.Robot;
import org.firstinspires.ftc.teamcode.roadrunner.drive.DriveConstants;
import org.firstinspires.ftc.teamcode.roadrunner.drive.SampleMecanumDrive;
import org.firstinspires.ftc.teamcode.util.CameraPosition;
@Autonomous(name = "Start From Left Wall", group = "Left Start", preselectTeleOp = "Khang Main")
public class StartFromLeftCenterSpike extends AbstractAuto {
public Robot robot;
public CameraPosition cameraPosition;
//Steps
public Trajectory start;
@Override
public void initializeSteps(int location) {
followTrajectory(start);
}
@Override
public void setAlliance() {}
@Override
public void setCameraPosition() {
cameraPosition = CameraPosition.FRONT;
}
@Override
public boolean useCamera() {
return true;
}
@Override
public void waitForStart() {
super.waitForStart();
}
@Override
public void makeTrajectories() {
// positions
Pose2d start = new Pose2d(-65.125,-48,Math.toRadians(90));
Vector2d step1 = new Vector2d(-24,-48);
Pose2d step2 = new Pose2d(-24,-48,Math.toRadians(90));
// this.start = robot.drive.trajectoryBuilder(start)
this.start = robot.drive.trajectoryBuilder(new Pose2d())
.forward(3)
.build();
}
}
//package org.firstinspires.ftc.teamcode.opmode.autonomous;
//import com.acmerobotics.roadrunner.geometry.Pose2d;
//import com.acmerobotics.roadrunner.geometry.Vector2d;
//import com.acmerobotics.roadrunner.trajectory.Trajectory;
//import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
//import com.qualcomm.robotcore.hardware.HardwareMap;
//
//import org.firstinspires.ftc.teamcode.hardware.Camera;
//import org.firstinspires.ftc.teamcode.hardware.Intake;
//import org.firstinspires.ftc.teamcode.hardware.Robot;
//import org.firstinspires.ftc.teamcode.roadrunner.drive.DriveConstants;
//import org.firstinspires.ftc.teamcode.roadrunner.drive.SampleMecanumDrive;
//import org.firstinspires.ftc.teamcode.util.CameraPosition;
//
//@Autonomous(name = "Start From Left Wall", group = "Left Start", preselectTeleOp = "Khang Main")
//public class StartFromLeftCenterSpike extends AbstractAuto {
//
// public Robot robot;
// public CameraPosition cameraPosition;
// //Steps
// public Trajectory start;
//
// @Override
// public void initializeSteps(int location) {
// followTrajectory(start);
// }
//
// @Override
// public void setAlliance() {}
//
// @Override
// public void setCameraPosition() {
// cameraPosition = CameraPosition.FRONT;
// }
//
// @Override
// public boolean useCamera() {
// return true;
// }
//
// @Override
// public void waitForStart() {
// super.waitForStart();
// }
//
// @Override
// public void makeTrajectories() {
//
// // positions
// Pose2d start = new Pose2d(-65.125,-48,Math.toRadians(90));
// Vector2d step1 = new Vector2d(-24,-48);
// Pose2d step2 = new Pose2d(-24,-48,Math.toRadians(90));
//
//// this.start = robot.drive.trajectoryBuilder(start)
// this.start = robot.drive.trajectoryBuilder(new Pose2d())
// .forward(3)
// .build();
// }
//}

View File

@@ -1,53 +1,53 @@
package org.firstinspires.ftc.teamcode.opmode.autonomous;
import org.firstinspires.ftc.teamcode.vision.Detection;
// Class for every step that the autonomous program will take
public abstract class Step {
private final double timeout;
private String telemetry;
// variables when moving
public double x;
public double y;
public double stepTime;
double tempTime = stepTime;
// variables when shooting
public Detection teamProp;
// Constructors
public Step(String telemetry) {
this.telemetry = telemetry;
this.timeout = -1;
}
public Step(String telemetry, double timeout) {
this.telemetry = telemetry;
this.timeout = timeout;
}
// Abstract methods to be overridden
public abstract void start();
public abstract void whileRunning();
public abstract void end();
public abstract boolean isFinished();
// Return the timeout for the step
public double getTimeout() {
return timeout;
}
public void setTelemetry(String telemetry) {
this.telemetry = telemetry;
}
// Return the Telemetry for the step
public String getTelemetry() {
return telemetry;
}
}
//package org.firstinspires.ftc.teamcode.opmode.autonomous;
//
//
//import org.firstinspires.ftc.teamcode.vision.Detection;
//
//// Class for every step that the autonomous program will take
//public abstract class Step {
// private final double timeout;
// private String telemetry;
//
// // variables when moving
// public double x;
// public double y;
// public double stepTime;
// double tempTime = stepTime;
//
// // variables when shooting
// public Detection teamProp;
//
// // Constructors
// public Step(String telemetry) {
// this.telemetry = telemetry;
// this.timeout = -1;
// }
//
// public Step(String telemetry, double timeout) {
// this.telemetry = telemetry;
// this.timeout = timeout;
// }
//
// // Abstract methods to be overridden
// public abstract void start();
//
// public abstract void whileRunning();
//
// public abstract void end();
//
// public abstract boolean isFinished();
//
// // Return the timeout for the step
// public double getTimeout() {
// return timeout;
// }
//
// public void setTelemetry(String telemetry) {
// this.telemetry = telemetry;
// }
//
// // Return the Telemetry for the step
// public String getTelemetry() {
// return telemetry;
// }
//}

View File

@@ -1,29 +0,0 @@
package org.firstinspires.ftc.teamcode.opmode.autonomous;
import com.acmerobotics.dashboard.config.Config;
import com.acmerobotics.roadrunner.geometry.Pose2d;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import org.firstinspires.ftc.teamcode.hardware.Robot;
import org.firstinspires.ftc.teamcode.roadrunner.drive.SampleMecanumDrive;
@Config
@Autonomous (name = "Start From Left Center Spike Test", group = "Testing", preselectTeleOp = "KhangMain")
public class TestAuto extends LinearOpMode {
private Robot robot;
@Override
public void runOpMode() throws InterruptedException {
this.robot = new Robot(hardwareMap);
SampleMecanumDrive drive = new SampleMecanumDrive(hardwareMap);
//the bot is 15.5 in wide, so its middle is 7.75 in from the side
//it is 17.8 in long, so half its length is 8.9 in; starting against the -72 in wall puts the robot center at x = -72 + 8.9 = -63.1
Pose2d startPOS = new Pose2d(-63.1, -36.75, Math.toRadians(90));
}
}

View File

@@ -1,84 +0,0 @@
package org.firstinspires.ftc.teamcode.opmode.autonomous;
import com.acmerobotics.roadrunner.geometry.Pose2d;
import com.acmerobotics.roadrunner.geometry.Vector2d;
import com.acmerobotics.roadrunner.trajectory.Trajectory;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.hardware.HardwareMap;
import org.firstinspires.ftc.teamcode.hardware.Camera;
import org.firstinspires.ftc.teamcode.hardware.Drive;
import org.firstinspires.ftc.teamcode.hardware.Robot;
import org.firstinspires.ftc.teamcode.roadrunner.drive.DriveConstants;
import org.firstinspires.ftc.teamcode.roadrunner.drive.SampleMecanumDrive;
import org.firstinspires.ftc.teamcode.util.CameraPosition;
@Autonomous(name = "Blue Left", group = "Left Start", preselectTeleOp = "Khang Main")
public class blueLeftAuto extends AbstractAuto{
public SampleMecanumDrive drive;
public Robot robot;
public Camera camera;
private boolean camEnabled = true;
public CameraPosition cameraPosition;
public Trajectory start;
@Override
public void setAlliance() {}
public void Robot(HardwareMap hardwareMap) {
//set drive to new Drive(hardwareMap) to revert to the old Drive class
drive = new SampleMecanumDrive(hardwareMap);
camera = new Camera(hardwareMap);
camera.initTargetingCamera();
camEnabled = true;
}
@Override
public void makeTrajectories() {
// positions
Pose2d start = new Pose2d(-65.125,6,Math.toRadians(180));
// Pose2d scoreSpikeLeft = new Pose2d(-36,6,Math.toRadians(-90));
// Pose2d scoreSpikeCenter = new Pose2d(-36,6,Math.toRadians(180));
// Pose2d scoreSpikeRight = new Pose2d(-36,6,Math.toRadians(90));
// Pose2d scoreBoardLeft = new Pose2d(-42,48,Math.toRadians(-90));
// Pose2d scoreBoardCenter = new Pose2d(-36,48,Math.toRadians(-90));
// Pose2d scoreBoardRight = new Pose2d(-30,48,Math.toRadians(-90));
// Pose2d park1 = new Pose2d(-60,48,Math.toRadians(-90));
// Pose2d park2 = new Pose2d(-60,60,Math.toRadians(-90));
// this.start = robot.drive.trajectoryBuilder(start)
// .lineToLinearHeading(scoreSpikeCenter)
// .build();
// this.start = robot.drive.trajectoryBuilder(scoreSpikeCenter)
// .lineToLinearHeading(scoreBoardCenter)
// .build();
// this.start = robot.drive.trajectoryBuilder(scoreBoardCenter)
// .lineToLinearHeading(park1)
// .build();
// this.start = robot.drive.trajectoryBuilder(park1)
// .lineToLinearHeading(park2)
// .build();
drive.setPoseEstimate(start);
Trajectory scoreSpikeCenter = drive.trajectoryBuilder(start)
.splineTo(new Vector2d(-36,6), Math.toRadians(180))
.build();
Trajectory scoreBoardCenter = drive.trajectoryBuilder(scoreSpikeCenter.end())
.splineTo(new Vector2d(-36,48), Math.toRadians(-90))
.build();
Trajectory park1 = drive.trajectoryBuilder(scoreBoardCenter.end())
.splineTo(new Vector2d(-60,48), Math.toRadians(-90))
.build();
Trajectory park2 = drive.trajectoryBuilder(park1.end())
.splineTo(new Vector2d(-60,60), Math.toRadians(180))
.build();
drive.followTrajectory(scoreSpikeCenter);
drive.followTrajectory(scoreBoardCenter);
drive.followTrajectory(park1);
drive.followTrajectory(park2);
}
}

View File

@@ -1,48 +0,0 @@
package org.firstinspires.ftc.teamcode.opmode.autonomous;
import com.acmerobotics.roadrunner.geometry.Pose2d;
import com.acmerobotics.roadrunner.geometry.Vector2d;
import com.acmerobotics.roadrunner.trajectory.Trajectory;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.hardware.HardwareMap;
import org.firstinspires.ftc.teamcode.hardware.Camera;
import org.firstinspires.ftc.teamcode.hardware.Drive;
import org.firstinspires.ftc.teamcode.hardware.Robot;
import org.firstinspires.ftc.teamcode.roadrunner.drive.DriveConstants;
import org.firstinspires.ftc.teamcode.roadrunner.drive.SampleMecanumDrive;
import org.firstinspires.ftc.teamcode.util.CameraPosition;
@Autonomous(name = "Blue Left Park", group = "Left Start", preselectTeleOp = "Khang Main")
public class parkAutoBlue extends AbstractAuto{
public SampleMecanumDrive drive;
public Robot robot;
public Camera camera;
private boolean camEnabled = true;
public CameraPosition cameraPosition;
public Trajectory start;
@Override
public void setAlliance() {}
public void Robot(HardwareMap hardwareMap) {
//set drive to new Drive(hardwareMap) to revert to the old Drive class
drive = new SampleMecanumDrive(hardwareMap);
camera = new Camera(hardwareMap);
camera.initTargetingCamera();
camEnabled = true;
}
@Override
public void makeTrajectories() {
// positions
Pose2d start = new Pose2d(-65.125,6,Math.toRadians(180));
drive.setPoseEstimate(start);
Trajectory park = drive.trajectoryBuilder(start)
.splineTo(new Vector2d(-65.125,60), Math.toRadians(180))
.build();
drive.followTrajectory(park);
}
}

View File

@@ -1,48 +0,0 @@
import com.acmerobotics.roadrunner.geometry.Pose2d;
import com.acmerobotics.roadrunner.geometry.Vector2d;
import com.acmerobotics.roadrunner.trajectory.Trajectory;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.hardware.HardwareMap;
import org.firstinspires.ftc.teamcode.hardware.Camera;
import org.firstinspires.ftc.teamcode.hardware.Drive;
import org.firstinspires.ftc.teamcode.hardware.Robot;
import org.firstinspires.ftc.teamcode.opmode.autonomous.AbstractAuto;
import org.firstinspires.ftc.teamcode.roadrunner.drive.DriveConstants;
import org.firstinspires.ftc.teamcode.roadrunner.drive.SampleMecanumDrive;
import org.firstinspires.ftc.teamcode.util.CameraPosition;
@Autonomous(name = "Red Right Park", group = "Right Start", preselectTeleOp = "Khang Main")
public class parkAutoRed extends AbstractAuto {
public SampleMecanumDrive drive;
public Robot robot;
public Camera camera;
private boolean camEnabled = true;
public CameraPosition cameraPosition;
public Trajectory start;
@Override
public void setAlliance() {}
public void Robot(HardwareMap hardwareMap) {
//set drive to new Drive(hardwareMap) to revert to the old Drive class
drive = new SampleMecanumDrive(hardwareMap);
camera = new Camera(hardwareMap);
camera.initTargetingCamera();
camEnabled = true;
}
@Override
public void makeTrajectories() {
// positions
Pose2d start = new Pose2d(65.125,6,Math.toRadians(180));
drive.setPoseEstimate(start);
Trajectory park = drive.trajectoryBuilder(start)
.splineTo(new Vector2d(65.125,60), Math.toRadians(180))
.build();
drive.followTrajectory(park);
}
}

View File

@@ -14,11 +14,15 @@ import org.firstinspires.ftc.teamcode.hardware.HopperPos;
import org.firstinspires.ftc.teamcode.hardware.Intake;
import org.firstinspires.ftc.teamcode.hardware.Robot;
import org.firstinspires.ftc.teamcode.hardware.SlidePos;
import com.qualcomm.robotcore.util.ElapsedTime;
@Config
@TeleOp(name = "Meet 1 TeleOp", group = "OpModes")
public class KhangMain extends OpMode {
//keep track of runtime for advanced macros
private ElapsedTime runTime = new ElapsedTime();
//create and set start position of intake
private Intake.Position Currentpos = Intake.Position.UP;
//track whether the slides are in the lowest position or not
@@ -47,6 +51,9 @@ public class KhangMain {
@Override
public void init() {
//reset runtime when opmode is initialized
runTime.reset();
//create each servo, motor, and controller object and configure them
planeServo = hardwareMap.get(Servo.class, "Plane Servo");
this.robot = new Robot(hardwareMap);
@@ -61,6 +68,12 @@ public class KhangMain {
@Override
public void loop() {
// Calculate the runtime in seconds
double currentTime = runTime.seconds();
//set start of macro runtime
double macroStartTime = 0;
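//note: this is a local declared inside loop(), so it resets to 0 on every cycle rather than persisting between calls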
//create and set X, Y, and Z for drive inputs
double x = -gamepad1.left_stick_y;
double y = -gamepad1.left_stick_x;
@@ -69,18 +82,18 @@
//set intake power proportional to how far the right trigger is pressed
robot.intake.setDcMotor(gamepad2.right_trigger);
//variable for the door so it opens and closes in sync with the intake
double door = gamepad2.right_trigger;
//manual slide height control
double sHeight = gamepad2.right_stick_y;
//set slides all the way down aka reset button
//activate intake or not
double intakeON = gamepad2.right_trigger;
//read the left joystick Y input to select slide macro positions
double sY = -gamepad2.left_stick_y;
//and the X input
double sX = gamepad2.left_stick_x;
//reset input used to send the slides back to their starting position
double reset = gamepad2.left_trigger;
//input that determines whether to shoot the drone
double shoot = gamepad1.right_trigger;
@@ -95,8 +108,6 @@ public class KhangMain {
robot.arm.setPos(CurrentApos);
//door open or not
robot.arm.openDoor(CurrentDpos);
//hopper position
robot.arm.setHopper(hopperpos);
//update
controller2.update();
@@ -119,17 +130,24 @@
//open the door while the intake is running
if (intakeON >= 0.01) {
CurrentDpos = DoorPos.DoorPosition.OPEN;
Currentpos = Intake.Position.STACK1;
} else {
CurrentDpos = DoorPos.DoorPosition.CLOSE;
Currentpos = Intake.Position.UP;
}
//control position of hopper
if (controller2.getLeftBumper().isJustPressed()) {
hopperpos = HopperPos.hopperPos.UP;
} else if (controller2.getLeftBumper().isJustReleased()) {
hopperpos = HopperPos.hopperPos.DOWN;
//reset slide position
if (reset >= 0.2) {
CurrentSpos = CurrentSpos.DOWN;
}
// //control position of hopper
// if (controller2.getLeftBumper().isJustPressed()) {
// hopperpos = HopperPos.hopperPos.UP;
// } else if (controller2.getLeftBumper().isJustReleased()) {
// hopperpos = HopperPos.hopperPos.DOWN;
// }
//shift intake up one pixel
if (controller2.getDLeft().isJustPressed()) {
//prevent it from going higher than the highest legal value
@@ -180,28 +198,38 @@
DownQuestion = DownQuestion.YES;
}
if (sY >= 0.2) {
//set slides to max when left joystick is up
if (sY >= 0.5) {
CurrentSpos = CurrentSpos.MAX;
}
if (sY <= -0.2) {
//set slides to tape 1 level when left joystick is down
if (sY <= -0.5) {
CurrentSpos = CurrentSpos.TAPE1;
}
//set slides to tape 3 when left joystick is right
if (sX >= 0.2) {
CurrentSpos = CurrentSpos.TAPE3;
}
//set slides to tape 2 when left joystick is left
if (sX <= -0.2) {
CurrentSpos = CurrentSpos.TAPE2;
}
if (gamepad2.left_trigger >= 0.35) {
//set slides all the way down when left bumper gets pressed
if (controller2.getLeftBumper().isJustPressed()) {
CurrentSpos = CurrentSpos.DOWN;
}
// if (gamepad2.left_stick_button = true) {
// CurrentSpos = CurrentSpos.DOWN;
// }
//set intake all the way up when the right bumper is pressed
if (controller2.getRightBumper().isJustPressed()) {
Currentpos = Currentpos.UP;
}
// update the runtime on the telemetry
telemetry.addData("Runtime", currentTime);
telemetry.update();
}
}

View File

@@ -1,321 +0,0 @@
package org.firstinspires.ftc.teamcode.roadrunner.drive;
import static org.firstinspires.ftc.teamcode.roadrunner.drive.DriveConstants.MAX_ACCEL;
import static org.firstinspires.ftc.teamcode.roadrunner.drive.DriveConstants.MAX_ANG_ACCEL;
import static org.firstinspires.ftc.teamcode.roadrunner.drive.DriveConstants.MAX_ANG_VEL;
import static org.firstinspires.ftc.teamcode.roadrunner.drive.DriveConstants.MAX_VEL;
import static org.firstinspires.ftc.teamcode.roadrunner.drive.DriveConstants.MOTOR_VELO_PID;
import static org.firstinspires.ftc.teamcode.roadrunner.drive.DriveConstants.RUN_USING_ENCODER;
import static org.firstinspires.ftc.teamcode.roadrunner.drive.DriveConstants.TRACK_WIDTH;
import static org.firstinspires.ftc.teamcode.roadrunner.drive.DriveConstants.encoderTicksToInches;
import static org.firstinspires.ftc.teamcode.roadrunner.drive.DriveConstants.kA;
import static org.firstinspires.ftc.teamcode.roadrunner.drive.DriveConstants.kStatic;
import static org.firstinspires.ftc.teamcode.roadrunner.drive.DriveConstants.kV;
import androidx.annotation.NonNull;
import com.acmerobotics.roadrunner.control.PIDCoefficients;
import com.acmerobotics.roadrunner.drive.DriveSignal;
import com.acmerobotics.roadrunner.drive.TankDrive;
import com.acmerobotics.roadrunner.followers.TankPIDVAFollower;
import com.acmerobotics.roadrunner.followers.TrajectoryFollower;
import com.acmerobotics.roadrunner.geometry.Pose2d;
import com.acmerobotics.roadrunner.trajectory.Trajectory;
import com.acmerobotics.roadrunner.trajectory.TrajectoryBuilder;
import com.acmerobotics.roadrunner.trajectory.constraints.AngularVelocityConstraint;
import com.acmerobotics.roadrunner.trajectory.constraints.MinVelocityConstraint;
import com.acmerobotics.roadrunner.trajectory.constraints.ProfileAccelerationConstraint;
import com.acmerobotics.roadrunner.trajectory.constraints.TankVelocityConstraint;
import com.acmerobotics.roadrunner.trajectory.constraints.TrajectoryAccelerationConstraint;
import com.acmerobotics.roadrunner.trajectory.constraints.TrajectoryVelocityConstraint;
import com.qualcomm.hardware.bosch.BNO055IMU;
import com.qualcomm.hardware.lynx.LynxModule;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.hardware.DcMotorEx;
import com.qualcomm.robotcore.hardware.HardwareMap;
import com.qualcomm.robotcore.hardware.PIDFCoefficients;
import com.qualcomm.robotcore.hardware.VoltageSensor;
import com.qualcomm.robotcore.hardware.configuration.typecontainers.MotorConfigurationType;
import org.firstinspires.ftc.teamcode.roadrunner.trajectorysequence.TrajectorySequence;
import org.firstinspires.ftc.teamcode.roadrunner.trajectorysequence.TrajectorySequenceBuilder;
import org.firstinspires.ftc.teamcode.roadrunner.trajectorysequence.TrajectorySequenceRunner;
import org.firstinspires.ftc.teamcode.roadrunner.util.LynxModuleUtil;
import java.util.Arrays;
import java.util.List;
/*
* Simple tank drive hardware implementation for REV hardware.
*/
public class SampleTankDrive extends TankDrive {
public static PIDCoefficients AXIAL_PID = new PIDCoefficients(0, 0, 0);
public static PIDCoefficients CROSS_TRACK_PID = new PIDCoefficients(0, 0, 0);
public static PIDCoefficients HEADING_PID = new PIDCoefficients(0, 0, 0);
public static double VX_WEIGHT = 1;
public static double OMEGA_WEIGHT = 1;
private TrajectorySequenceRunner trajectorySequenceRunner;
private static final TrajectoryVelocityConstraint VEL_CONSTRAINT = getVelocityConstraint(MAX_VEL, MAX_ANG_VEL, TRACK_WIDTH);
private static final TrajectoryAccelerationConstraint accelConstraint = getAccelerationConstraint(MAX_ACCEL);
private TrajectoryFollower follower;
private List<DcMotorEx> motors, leftMotors, rightMotors;
private BNO055IMU imu;
private VoltageSensor batteryVoltageSensor;
public SampleTankDrive(HardwareMap hardwareMap) {
super(kV, kA, kStatic, TRACK_WIDTH);
follower = new TankPIDVAFollower(AXIAL_PID, CROSS_TRACK_PID,
new Pose2d(0.5, 0.5, Math.toRadians(5.0)), 0.5);
LynxModuleUtil.ensureMinimumFirmwareVersion(hardwareMap);
batteryVoltageSensor = hardwareMap.voltageSensor.iterator().next();
for (LynxModule module : hardwareMap.getAll(LynxModule.class)) {
module.setBulkCachingMode(LynxModule.BulkCachingMode.AUTO);
}
// TODO: adjust the names of the following hardware devices to match your configuration
imu = hardwareMap.get(BNO055IMU.class, "imu");
BNO055IMU.Parameters parameters = new BNO055IMU.Parameters();
parameters.angleUnit = BNO055IMU.AngleUnit.RADIANS;
imu.initialize(parameters);
// TODO: If the hub containing the IMU you are using is mounted so that the "REV" logo does
// not face up, remap the IMU axes so that the z-axis points upward (normal to the floor.)
//
// | +Z axis
// |
// |
// |
// _______|_____________ +Y axis
// / |_____________/|__________
// / REV / EXPANSION //
// / / HUB //
// /_______/_____________//
// |_______/_____________|/
// /
// / +X axis
//
// This diagram is derived from the axes in section 3.4 https://www.bosch-sensortec.com/media/boschsensortec/downloads/datasheets/bst-bno055-ds000.pdf
// and the placement of the dot/orientation from https://docs.revrobotics.com/rev-control-system/control-system-overview/dimensions#imu-location
//
// For example, if +Y in this diagram faces downwards, you would use AxisDirection.NEG_Y.
// BNO055IMUUtil.remapZAxis(imu, AxisDirection.NEG_Y);
// add/remove motors depending on your robot (e.g., 6WD)
DcMotorEx leftFront = hardwareMap.get(DcMotorEx.class, "leftFront");
DcMotorEx leftRear = hardwareMap.get(DcMotorEx.class, "leftRear");
DcMotorEx rightRear = hardwareMap.get(DcMotorEx.class, "rightRear");
DcMotorEx rightFront = hardwareMap.get(DcMotorEx.class, "rightFront");
motors = Arrays.asList(leftFront, leftRear, rightRear, rightFront);
leftMotors = Arrays.asList(leftFront, leftRear);
rightMotors = Arrays.asList(rightFront, rightRear);
for (DcMotorEx motor : motors) {
MotorConfigurationType motorConfigurationType = motor.getMotorType().clone();
motorConfigurationType.setAchieveableMaxRPMFraction(1.0);
motor.setMotorType(motorConfigurationType);
}
if (RUN_USING_ENCODER) {
setMode(DcMotor.RunMode.RUN_USING_ENCODER);
}
setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);
if (RUN_USING_ENCODER && MOTOR_VELO_PID != null) {
setPIDFCoefficients(DcMotor.RunMode.RUN_USING_ENCODER, MOTOR_VELO_PID);
}
// TODO: reverse any motors using DcMotor.setDirection()
// TODO: if desired, use setLocalizer() to change the localization method
// for instance, setLocalizer(new ThreeTrackingWheelLocalizer(...));
trajectorySequenceRunner = new TrajectorySequenceRunner(follower, HEADING_PID);
}
public TrajectoryBuilder trajectoryBuilder(Pose2d startPose) {
return new TrajectoryBuilder(startPose, VEL_CONSTRAINT, accelConstraint);
}
public TrajectoryBuilder trajectoryBuilder(Pose2d startPose, boolean reversed) {
return new TrajectoryBuilder(startPose, reversed, VEL_CONSTRAINT, accelConstraint);
}
public TrajectoryBuilder trajectoryBuilder(Pose2d startPose, double startHeading) {
return new TrajectoryBuilder(startPose, startHeading, VEL_CONSTRAINT, accelConstraint);
}
public TrajectorySequenceBuilder trajectorySequenceBuilder(Pose2d startPose) {
return new TrajectorySequenceBuilder(
startPose,
VEL_CONSTRAINT, accelConstraint,
MAX_ANG_VEL, MAX_ANG_ACCEL
);
}
public void turnAsync(double angle) {
trajectorySequenceRunner.followTrajectorySequenceAsync(
trajectorySequenceBuilder(getPoseEstimate())
.turn(angle)
.build()
);
}
public void turn(double angle) {
turnAsync(angle);
waitForIdle();
}
public void followTrajectoryAsync(Trajectory trajectory) {
trajectorySequenceRunner.followTrajectorySequenceAsync(
trajectorySequenceBuilder(trajectory.start())
.addTrajectory(trajectory)
.build()
);
}
public void followTrajectory(Trajectory trajectory) {
followTrajectoryAsync(trajectory);
waitForIdle();
}
public void followTrajectorySequenceAsync(TrajectorySequence trajectorySequence) {
trajectorySequenceRunner.followTrajectorySequenceAsync(trajectorySequence);
}
public void followTrajectorySequence(TrajectorySequence trajectorySequence) {
followTrajectorySequenceAsync(trajectorySequence);
waitForIdle();
}
public Pose2d getLastError() {
return trajectorySequenceRunner.getLastPoseError();
}
public void update() {
updatePoseEstimate();
DriveSignal signal = trajectorySequenceRunner.update(getPoseEstimate(), getPoseVelocity());
if (signal != null) setDriveSignal(signal);
}
public void waitForIdle() {
while (!Thread.currentThread().isInterrupted() && isBusy())
update();
}
public boolean isBusy() {
return trajectorySequenceRunner.isBusy();
}
public void setMode(DcMotor.RunMode runMode) {
for (DcMotorEx motor : motors) {
motor.setMode(runMode);
}
}
public void setZeroPowerBehavior(DcMotor.ZeroPowerBehavior zeroPowerBehavior) {
for (DcMotorEx motor : motors) {
motor.setZeroPowerBehavior(zeroPowerBehavior);
}
}
public void setPIDFCoefficients(DcMotor.RunMode runMode, PIDFCoefficients coefficients) {
PIDFCoefficients compensatedCoefficients = new PIDFCoefficients(
coefficients.p, coefficients.i, coefficients.d,
coefficients.f * 12 / batteryVoltageSensor.getVoltage()
);
for (DcMotorEx motor : motors) {
motor.setPIDFCoefficients(runMode, compensatedCoefficients);
}
}
public void setWeightedDrivePower(Pose2d drivePower) {
Pose2d vel = drivePower;
if (Math.abs(drivePower.getX()) + Math.abs(drivePower.getHeading()) > 1) {
// re-normalize the powers according to the weights
double denom = VX_WEIGHT * Math.abs(drivePower.getX())
+ OMEGA_WEIGHT * Math.abs(drivePower.getHeading());
vel = new Pose2d(
VX_WEIGHT * drivePower.getX(),
0,
OMEGA_WEIGHT * drivePower.getHeading()
).div(denom);
}
setDrivePower(vel);
}
@NonNull
@Override
public List<Double> getWheelPositions() {
double leftSum = 0, rightSum = 0;
for (DcMotorEx leftMotor : leftMotors) {
leftSum += encoderTicksToInches(leftMotor.getCurrentPosition());
}
for (DcMotorEx rightMotor : rightMotors) {
rightSum += encoderTicksToInches(rightMotor.getCurrentPosition());
}
return Arrays.asList(leftSum / leftMotors.size(), rightSum / rightMotors.size());
}
public List<Double> getWheelVelocities() {
double leftSum = 0, rightSum = 0;
for (DcMotorEx leftMotor : leftMotors) {
leftSum += encoderTicksToInches(leftMotor.getVelocity());
}
for (DcMotorEx rightMotor : rightMotors) {
rightSum += encoderTicksToInches(rightMotor.getVelocity());
}
return Arrays.asList(leftSum / leftMotors.size(), rightSum / rightMotors.size());
}
@Override
public void setMotorPowers(double v, double v1) {
for (DcMotorEx leftMotor : leftMotors) {
leftMotor.setPower(v);
}
for (DcMotorEx rightMotor : rightMotors) {
rightMotor.setPower(v1);
}
}
@Override
public double getRawExternalHeading() {
return imu.getAngularOrientation().firstAngle;
}
@Override
public Double getExternalHeadingVelocity() {
// To work around an SDK bug, use -zRotationRate in place of xRotationRate
// and -xRotationRate in place of zRotationRate (yRotationRate behaves as
// expected). This bug does NOT affect orientation.
//
// See https://github.com/FIRST-Tech-Challenge/FtcRobotController/issues/251 for details.
return (double) -imu.getAngularVelocity().xRotationRate;
}
public static TrajectoryVelocityConstraint getVelocityConstraint(double maxVel, double maxAngularVel, double trackWidth) {
return new MinVelocityConstraint(Arrays.asList(
new AngularVelocityConstraint(maxAngularVel),
new TankVelocityConstraint(maxVel, trackWidth)
));
}
public static TrajectoryAccelerationConstraint getAccelerationConstraint(double maxAccel) {
return new ProfileAccelerationConstraint(maxAccel);
}
}
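For reference, the class deleted above follows the standard Road Runner quickstart pattern, so a typical autonomous usage would look roughly like the sketch below. This is a hedged example built only from the methods shown in the file (trajectoryBuilder, followTrajectory, turn); the OpMode name, start pose, and distances are made up for illustration.

import com.acmerobotics.roadrunner.geometry.Pose2d;
import com.acmerobotics.roadrunner.trajectory.Trajectory;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;

// Illustrative autonomous OpMode using the SampleTankDrive API shown above.
@Autonomous(name = "TankDriveUsageSketch", group = "drive")
public class TankDriveUsageSketch extends LinearOpMode {
    @Override
    public void runOpMode() throws InterruptedException {
        SampleTankDrive drive = new SampleTankDrive(hardwareMap);
        drive.setPoseEstimate(new Pose2d(0, 0, 0)); // assume the robot starts at the field origin

        // Drive forward 24 inches using the velocity/acceleration constraints baked into the class.
        Trajectory forward = drive.trajectoryBuilder(new Pose2d(0, 0, 0))
                .forward(24)
                .build();

        waitForStart();
        if (isStopRequested()) return;

        drive.followTrajectory(forward);  // blocks via waitForIdle() until the follower finishes
        drive.turn(Math.toRadians(90));   // turnAsync() + waitForIdle() under the hood
    }
}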


@@ -93,14 +93,14 @@ android {
signingConfig signingConfigs.release
ndk {
abiFilters "armeabi-v7a", "arm64-v8a"
abiFilters "armeabi-v7a"
}
}
debug {
debuggable true
jniDebuggable true
ndk {
abiFilters "armeabi-v7a", "arm64-v8a"
abiFilters "armeabi-v7a"
}
}
}