Compare commits

...

4 Commits

Author SHA1 Message Date
Cal Kestis 1da45a36c6
Merge pull request #941 from FIRST-Tech-Challenge/20240215-115542-release-candidate
FtcRobotController v9.1
2024-02-16 14:13:01 -08:00
Cal Kestis c303962469 FtcRobotController v9.1 2024-02-15 13:10:49 -08:00
Cal Kestis f326c0d033
Merge pull request #731 from FIRST-Tech-Challenge/20230929-083754-release-candidate
FtcRobotController v9.0.1
2023-09-29 19:13:38 +09:00
Cal Kestis c023e97a6a FtcRobotController v9.0.1 2023-09-29 19:11:41 +09:00
9 changed files with 273 additions and 36 deletions

View File

@ -1,8 +1,8 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:versionCode="51"
android:versionName="9.0">
android:versionCode="53"
android:versionName="9.1">
<uses-permission android:name="android.permission.RECEIVE_BOOT_COMPLETED" />

View File

@ -29,6 +29,7 @@
package org.firstinspires.ftc.robotcontroller.external.samples;
import android.util.Size;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
@ -44,6 +45,23 @@ import java.util.List;
* This OpMode illustrates the basics of AprilTag recognition and pose estimation,
* including Java Builder structures for specifying Vision parameters.
*
* For an introduction to AprilTags, see the FTC-DOCS link below:
* https://ftc-docs.firstinspires.org/en/latest/apriltag/vision_portal/apriltag_intro/apriltag-intro.html
*
* In this sample, any visible tag ID will be detected and displayed, but only tags that are included in the default
* "TagLibrary" will have their position and orientation information displayed. This default TagLibrary contains
* the current Season's AprilTags and a small set of "test Tags" in the high number range.
*
* When an AprilTag in the TagLibrary is detected, the SDK provides location and orientation of the tag, relative to the camera.
* This information is provided in the "ftcPose" member of the returned "detection", and is explained in the ftc-docs page linked below.
* https://ftc-docs.firstinspires.org/apriltag-detection-values
*
* To experiment with using AprilTags to navigate, try out these two driving samples:
* RobotAutoDriveToAprilTagOmni and RobotAutoDriveToAprilTagTank
*
* There are many "default" VisionPortal and AprilTag configuration parameters that may be overridden if desired.
* These default parameters are shown as comments in the code below.
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
*/
@ -106,6 +124,8 @@ public class ConceptAprilTag extends LinearOpMode {
// Create the AprilTag processor.
aprilTag = new AprilTagProcessor.Builder()
// The following default settings are available to un-comment and edit as needed.
//.setDrawAxes(false)
//.setDrawCubeProjection(false)
//.setDrawTagOutline(true)
@ -117,11 +137,19 @@ public class ConceptAprilTag extends LinearOpMode {
// If you do not manually specify calibration parameters, the SDK will attempt
// to load a predefined calibration for your camera.
//.setLensIntrinsics(578.272, 578.272, 402.145, 221.506)
// ... these parameters are fx, fy, cx, cy.
.build();
// Adjust Image Decimation to trade-off detection-range for detection-rate.
// eg: Some typical detection data using a Logitech C920 WebCam
// Decimation = 1 .. Detect 2" Tag from 10 feet away at 10 Frames per second
// Decimation = 2 .. Detect 2" Tag from 6 feet away at 22 Frames per second
// Decimation = 3 .. Detect 2" Tag from 4 feet away at 30 Frames Per Second (default)
// Decimation = 3 .. Detect 5" Tag from 10 feet away at 30 Frames Per Second (default)
// Note: Decimation can be changed on-the-fly to adapt during a match.
//aprilTag.setDecimation(3);
// Create the vision portal by using a builder.
VisionPortal.Builder builder = new VisionPortal.Builder();
@ -136,7 +164,7 @@ public class ConceptAprilTag extends LinearOpMode {
//builder.setCameraResolution(new Size(640, 480));
// Enable the RC preview (LiveView). Set "false" to omit camera monitoring.
//builder.enableCameraMonitoring(true);
//builder.enableLiveView(true);
// Set the stream format; MJPEG uses less bandwidth than default YUY2.
//builder.setStreamFormat(VisionPortal.StreamFormat.YUY2);

View File

@ -44,6 +44,20 @@ import java.util.List;
* This OpMode illustrates the basics of AprilTag recognition and pose estimation, using
* the easy way.
*
* For an introduction to AprilTags, see the FTC-DOCS link below:
* https://ftc-docs.firstinspires.org/en/latest/apriltag/vision_portal/apriltag_intro/apriltag-intro.html
*
* In this sample, any visible tag ID will be detected and displayed, but only tags that are included in the default
* "TagLibrary" will have their position and orientation information displayed. This default TagLibrary contains
* the current Season's AprilTags and a small set of "test Tags" in the high number range.
*
* When an AprilTag in the TagLibrary is detected, the SDK provides location and orientation of the tag, relative to the camera.
* This information is provided in the "ftcPose" member of the returned "detection", and is explained in the ftc-docs page linked below.
* https://ftc-docs.firstinspires.org/apriltag-detection-values
*
* To experiment with using AprilTags to navigate, try out these two driving samples:
* RobotAutoDriveToAprilTagOmni and RobotAutoDriveToAprilTagTank
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
*/

View File

@ -29,6 +29,7 @@
package org.firstinspires.ftc.robotcontroller.external.samples;
import android.util.Size;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
@ -53,6 +54,17 @@ public class ConceptTensorFlowObjectDetection extends LinearOpMode {
private static final boolean USE_WEBCAM = true; // true for webcam, false for phone camera
// TFOD_MODEL_ASSET points to a model file stored in the project Asset location,
// this is only used for Android Studio when using models in Assets.
private static final String TFOD_MODEL_ASSET = "MyModelStoredAsAsset.tflite";
// TFOD_MODEL_FILE points to a model file stored onboard the Robot Controller's storage,
// this is used when uploading models directly to the RC using the model upload interface.
private static final String TFOD_MODEL_FILE = "/sdcard/FIRST/tflitemodels/myCustomModel.tflite";
// Define the labels recognized in the model for TFOD (must be in training order!)
private static final String[] LABELS = {
"Pixel",
};
/**
* The variable to store our instance of the TensorFlow Object Detection processor.
*/
@ -107,11 +119,16 @@ public class ConceptTensorFlowObjectDetection extends LinearOpMode {
// Create the TensorFlow processor by using a builder.
tfod = new TfodProcessor.Builder()
// Use setModelAssetName() if the TF Model is built in as an asset.
// Use setModelFileName() if you have downloaded a custom team model to the Robot Controller.
// With the following lines commented out, the default TfodProcessor Builder
// will load the default model for the season. To define a custom model to load,
// choose one of the following:
// Use setModelAssetName() if the custom TF Model is built in as an asset (AS only).
// Use setModelFileName() if you have downloaded a custom team model to the Robot Controller.
//.setModelAssetName(TFOD_MODEL_ASSET)
//.setModelFileName(TFOD_MODEL_FILE)
// The following default settings are available to un-comment and edit as needed to
// set parameters for custom models.
//.setModelLabels(LABELS)
//.setIsModelTensorFlow2(true)
//.setIsModelQuantized(true)
@ -134,7 +151,7 @@ public class ConceptTensorFlowObjectDetection extends LinearOpMode {
//builder.setCameraResolution(new Size(640, 480));
// Enable the RC preview (LiveView). Set "false" to omit camera monitoring.
//builder.enableCameraMonitoring(true);
//builder.enableLiveView(true);
// Set the stream format; MJPEG uses less bandwidth than default YUY2.
//builder.setStreamFormat(VisionPortal.StreamFormat.YUY2);

View File

@ -49,6 +49,13 @@ import java.util.concurrent.TimeUnit;
* This OpMode illustrates using a camera to locate and drive towards a specific AprilTag.
* The code assumes a Holonomic (Mecanum or X Drive) Robot.
*
* For an introduction to AprilTags, see the ftc-docs link below:
* https://ftc-docs.firstinspires.org/en/latest/apriltag/vision_portal/apriltag_intro/apriltag-intro.html
*
* When an AprilTag in the TagLibrary is detected, the SDK provides location and orientation of the tag, relative to the camera.
* This information is provided in the "ftcPose" member of the returned "detection", and is explained in the ftc-docs page linked below.
* https://ftc-docs.firstinspires.org/apriltag-detection-values
*
* The drive goal is to rotate to keep the Tag centered in the camera, while strafing to be directly in front of the tag, and
* driving towards the tag to achieve the desired distance.
* To reduce any motion blur (which will interrupt the detection process) the Camera exposure is reduced to a very low value (5mS)
@ -102,7 +109,7 @@ public class RobotAutoDriveToAprilTagOmni extends LinearOpMode
private DcMotor rightBackDrive = null; // Used to control the right back drive wheel
private static final boolean USE_WEBCAM = true; // Set true to use a webcam, or false for a phone camera
private static final int DESIRED_TAG_ID = 0; // Choose the tag you want to approach or set to -1 for ANY tag.
private static final int DESIRED_TAG_ID = -1; // Choose the tag you want to approach or set to -1 for ANY tag.
private VisionPortal visionPortal; // Used to manage the video source.
private AprilTagProcessor aprilTag; // Used for managing the AprilTag detection process.
private AprilTagDetection desiredTag = null; // Used to hold the data for a detected AprilTag
@ -150,25 +157,33 @@ public class RobotAutoDriveToAprilTagOmni extends LinearOpMode
// Step through the list of detected tags and look for a matching tag
List<AprilTagDetection> currentDetections = aprilTag.getDetections();
for (AprilTagDetection detection : currentDetections) {
if ((detection.metadata != null) &&
((DESIRED_TAG_ID < 0) || (detection.id == DESIRED_TAG_ID)) ){
targetFound = true;
desiredTag = detection;
break; // don't look any further.
// Look to see if we have size info on this tag.
if (detection.metadata != null) {
// Check to see if we want to track towards this tag.
if ((DESIRED_TAG_ID < 0) || (detection.id == DESIRED_TAG_ID)) {
// Yes, we want to use this tag.
targetFound = true;
desiredTag = detection;
break; // don't look any further.
} else {
// This tag is in the library, but we do not want to track it right now.
telemetry.addData("Skipping", "Tag ID %d is not desired", detection.id);
}
} else {
telemetry.addData("Unknown Target", "Tag ID %d is not in TagLibrary\n", detection.id);
// This tag is NOT in the library, so we don't have enough information to track to it.
telemetry.addData("Unknown", "Tag ID %d is not in TagLibrary", detection.id);
}
}
// Tell the driver what we see, and what to do.
if (targetFound) {
telemetry.addData(">","HOLD Left-Bumper to Drive to Target\n");
telemetry.addData("Target", "ID %d (%s)", desiredTag.id, desiredTag.metadata.name);
telemetry.addData("\n>","HOLD Left-Bumper to Drive to Target\n");
telemetry.addData("Found", "ID %d (%s)", desiredTag.id, desiredTag.metadata.name);
telemetry.addData("Range", "%5.1f inches", desiredTag.ftcPose.range);
telemetry.addData("Bearing","%3.0f degrees", desiredTag.ftcPose.bearing);
telemetry.addData("Yaw","%3.0f degrees", desiredTag.ftcPose.yaw);
} else {
telemetry.addData(">","Drive using joysticks to find valid target\n");
telemetry.addData("\n>","Drive using joysticks to find valid target\n");
}
// If Left Bumper is being pressed, AND we have found the desired target, Drive to target Automatically .
@ -243,6 +258,15 @@ public class RobotAutoDriveToAprilTagOmni extends LinearOpMode
// Create the AprilTag processor by using a builder.
aprilTag = new AprilTagProcessor.Builder().build();
// Adjust Image Decimation to trade-off detection-range for detection-rate.
// eg: Some typical detection data using a Logitech C920 WebCam
// Decimation = 1 .. Detect 2" Tag from 10 feet away at 10 Frames per second
// Decimation = 2 .. Detect 2" Tag from 6 feet away at 22 Frames per second
// Decimation = 3 .. Detect 2" Tag from 4 feet away at 30 Frames Per Second
// Decimation = 3 .. Detect 5" Tag from 10 feet away at 30 Frames Per Second
// Note: Decimation can be changed on-the-fly to adapt during a match.
aprilTag.setDecimation(2);
// Create the vision portal by using a builder.
if (USE_WEBCAM) {
visionPortal = new VisionPortal.Builder()

View File

@ -49,6 +49,13 @@ import java.util.concurrent.TimeUnit;
* This OpMode illustrates using a camera to locate and drive towards a specific AprilTag.
* The code assumes a basic two-wheel (Tank) Robot Drivetrain
*
* For an introduction to AprilTags, see the ftc-docs link below:
* https://ftc-docs.firstinspires.org/en/latest/apriltag/vision_portal/apriltag_intro/apriltag-intro.html
*
* When an AprilTag in the TagLibrary is detected, the SDK provides location and orientation of the tag, relative to the camera.
* This information is provided in the "ftcPose" member of the returned "detection", and is explained in the ftc-docs page linked below.
* https://ftc-docs.firstinspires.org/apriltag-detection-values
*
* The driving goal is to rotate to keep the tag centered in the camera, while driving towards the tag to achieve the desired distance.
* To reduce any motion blur (which will interrupt the detection process) the Camera exposure is reduced to a very low value (5mS)
* You can determine the best exposure and gain values by using the ConceptAprilTagOptimizeExposure OpMode in this Samples folder.
@ -97,7 +104,7 @@ public class RobotAutoDriveToAprilTagTank extends LinearOpMode
private DcMotor rightDrive = null; // Used to control the right drive wheel
private static final boolean USE_WEBCAM = true; // Set true to use a webcam, or false for a phone camera
private static final int DESIRED_TAG_ID = 0; // Choose the tag you want to approach or set to -1 for ANY tag.
private static final int DESIRED_TAG_ID = -1; // Choose the tag you want to approach or set to -1 for ANY tag.
private VisionPortal visionPortal; // Used to manage the video source.
private AprilTagProcessor aprilTag; // Used for managing the AprilTag detection process.
private AprilTagDetection desiredTag = null; // Used to hold the data for a detected AprilTag
@ -140,24 +147,32 @@ public class RobotAutoDriveToAprilTagTank extends LinearOpMode
// Step through the list of detected tags and look for a matching tag
List<AprilTagDetection> currentDetections = aprilTag.getDetections();
for (AprilTagDetection detection : currentDetections) {
if ((detection.metadata != null) &&
((DESIRED_TAG_ID < 0) || (detection.id == DESIRED_TAG_ID)) ){
targetFound = true;
desiredTag = detection;
break; // don't look any further.
// Look to see if we have size info on this tag.
if (detection.metadata != null) {
// Check to see if we want to track towards this tag.
if ((DESIRED_TAG_ID < 0) || (detection.id == DESIRED_TAG_ID)) {
// Yes, we want to use this tag.
targetFound = true;
desiredTag = detection;
break; // don't look any further.
} else {
// This tag is in the library, but we do not want to track it right now.
telemetry.addData("Skipping", "Tag ID %d is not desired", detection.id);
}
} else {
telemetry.addData("Unknown Target", "Tag ID %d is not in TagLibrary\n", detection.id);
// This tag is NOT in the library, so we don't have enough information to track to it.
telemetry.addData("Unknown", "Tag ID %d is not in TagLibrary", detection.id);
}
}
// Tell the driver what we see, and what to do.
if (targetFound) {
telemetry.addData(">","HOLD Left-Bumper to Drive to Target\n");
telemetry.addData("Target", "ID %d (%s)", desiredTag.id, desiredTag.metadata.name);
telemetry.addData("\n>","HOLD Left-Bumper to Drive to Target\n");
telemetry.addData("Found", "ID %d (%s)", desiredTag.id, desiredTag.metadata.name);
telemetry.addData("Range", "%5.1f inches", desiredTag.ftcPose.range);
telemetry.addData("Bearing","%3.0f degrees", desiredTag.ftcPose.bearing);
} else {
telemetry.addData(">","Drive using joysticks to find valid target\n");
telemetry.addData("\n>","Drive using joysticks to find valid target\n");
}
// If Left Bumper is being pressed, AND we have found the desired target, Drive to target Automatically .
@ -218,6 +233,15 @@ public class RobotAutoDriveToAprilTagTank extends LinearOpMode
// Create the AprilTag processor by using a builder.
aprilTag = new AprilTagProcessor.Builder().build();
// Adjust Image Decimation to trade-off detection-range for detection-rate.
// eg: Some typical detection data using a Logitech C920 WebCam
// Decimation = 1 .. Detect 2" Tag from 10 feet away at 10 Frames per second
// Decimation = 2 .. Detect 2" Tag from 6 feet away at 22 Frames per second
// Decimation = 3 .. Detect 2" Tag from 4 feet away at 30 Frames Per Second
// Decimation = 3 .. Detect 5" Tag from 10 feet away at 30 Frames Per Second
// Note: Decimation can be changed on-the-fly to adapt during a match.
aprilTag.setDecimation(2);
// Create the vision portal by using a builder.
if (USE_WEBCAM) {
visionPortal = new VisionPortal.Builder()

View File

@ -0,0 +1,78 @@
/* Copyright (c) 2024 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.robotcontroller.external.samples;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.DigitalChannel;
/*
* This OpMode demonstrates how to use a digital channel.
*
* The OpMode assumes that the digital channel is configured with a name of "digitalTouch".
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
*/
@TeleOp(name = "Sensor: digital channel", group = "Sensor")
@Disabled
public class SensorDigitalTouch extends LinearOpMode {
    // Handle to the digital channel configured as "digitalTouch".
    DigitalChannel digitalTouch;  // Digital channel Object

    /**
     * Reads a digital touch sensor in a loop and reports its state via telemetry.
     * Assumes a digital channel configured with the name "digitalTouch"; the channel
     * reads LOW (false) while the button is pressed.
     */
    @Override
    public void runOpMode() {

        // get a reference to our touchSensor object.
        digitalTouch = hardwareMap.get(DigitalChannel.class, "digitalTouch");

        // A touch sensor is an input device, so set the channel direction accordingly.
        digitalTouch.setMode(DigitalChannel.Mode.INPUT);
        telemetry.addData("DigitalTouchSensorExample", "Press start to continue...");
        telemetry.update();

        // wait for the start button to be pressed.
        waitForStart();

        // while the OpMode is active, loop and read the digital channel.
        // Note we use opModeIsActive() as our loop condition because it is an interruptible method.
        while (opModeIsActive()) {

            // button is pressed if value returned is LOW or false.
            // send the info back to driver station using telemetry function.
            if (!digitalTouch.getState()) {
                telemetry.addData("Button", "PRESSED");
            } else {
                telemetry.addData("Button", "NOT PRESSED");
            }

            telemetry.update();
        }
    }
}

View File

@ -59,6 +59,58 @@ The readme.md file located in the [/TeamCode/src/main/java/org/firstinspires/ftc
# Release Information
## Version 9.1 (20240215-115542)
### Enhancements
* Fixes a problem with Blocks: if the user closes a Block's warning balloon, it will still be closed next time the project is opened in the Blocks editor.
* In the Blocks editor, an alert concerning missing hardware devices is not shown if all the Blocks that use the missing hardware devices are disabled.
* Adds Blocks to support comparing property values CRServo.Direction, DCMotor.Direction, DCMotor.Mode, DCMotor.ZeroPowerBehavior, DigitalChannel.Mode, GyroSensor.HeadingMode, IrSeekerSensor.Mode, and Servo.Direction, to the corresponding enum Block.
* Improves OnBotJava auto-import to correctly import classes when used in certain situations.
* Improves OnBotJava autocomplete to provide better completion options in most cases.
* This fixes an issue where autocomplete would fail if a method with two or more formal parameters was defined.
* In OnBotJava, code folding support was added to expand and collapse code sections
* In OnBotJava, the copyright header is now automatically collapsed when loading new files
* For all Blocks OpMode samples, intro comments have been moved to the RunOpMode comment balloon.
* The Clean up Blocks command in the Blocks editor now positions function Blocks so their comment balloons don't overlap other function Blocks.
* Added Blocks OpMode sample SensorTouch.
* Added Java OpMode sample SensorDigitalTouch.
* Several improvements to VisionPortal
* Adds option to control whether the stream is automatically started following a `.build()` call on a VisionPortal Builder
* Adds option to control whether the vision processing statistics overlay is rendered or not
* VisionPortals now implement the `CameraStreamSource` interface, allowing multiportal users to select which portal is routed to the DS in INIT by calling CameraStreamServer.getInstance().setSource(visionPortal). Can be selected via gamepad, between Camera Stream sessions.
* Add option to `AprilTagProcessor` to suppress calibration warnings
* Improves camera calibration warnings
* If a calibration is scaled, the resolution it was scaled from will be listed
* If calibrations exist with the wrong aspect ratio, the calibrated resolutions will be listed
* Fixes race condition which caused app crash when calling `stopStreaming()` immediately followed by `close()` on a VisionPortal
* Fixes IllegalStateException when calling `stopStreaming()` immediately after building a VisionPortal
* Added FTC Blocks counterparts to new Java methods:
* VisionPortal.Builder.setAutoStartStreamOnBuild
* VisionPortal.Builder.setShowStatsOverlay
* AprilTagProcessor.Builder.setSuppressCalibrationWarnings
* CameraStreamServer.setSource
### Bug Fixes
* Fixes a problem where OnBotJava does not apply font size settings to the editor.
* Updates EasyOpenCV dependency to v1.7.1
* Fixes inability to use EasyOpenCV CameraFactory in OnBotJava
* Fixes entire RC app crash when user pipeline throws an exception
* Fixes entire RC app crash when user canvas annotator throws an exception
* Use the modern stacktrace display when handling user exceptions instead of the legacy ESTOP telemetry message
## Version 9.0.1 (20230929-083754)
### Enhancements
* Updates AprilTag samples to include Decimation and additional Comments. Also corrects misleading tag ID warnings
* Increases maximum size of Blocks inline comments to 140 characters
* Adds Blocks sample BasicOmniOpMode.
* Updated CENTERSTAGE library AprilTag orientation quaternions
* Thanks [@FromenActual](https://github.com/FromenActual)
* Updated Java Sample ConceptTensorFlowObjectDetection.java to include missing elements needed for custom model support.
### Bug Fixes
* Fixes a problem where after October 1 the Driver Station will report as obsolete on v9.0 and prompt the user to update.
## Version 9.0 (20230830-154348)
### Breaking Changes

View File

@ -4,15 +4,15 @@ repositories {
}
dependencies {
implementation 'org.firstinspires.ftc:Inspection:9.0.0'
implementation 'org.firstinspires.ftc:Blocks:9.0.0'
implementation 'org.firstinspires.ftc:Tfod:9.0.0'
implementation 'org.firstinspires.ftc:RobotCore:9.0.0'
implementation 'org.firstinspires.ftc:RobotServer:9.0.0'
implementation 'org.firstinspires.ftc:OnBotJava:9.0.0'
implementation 'org.firstinspires.ftc:Hardware:9.0.0'
implementation 'org.firstinspires.ftc:FtcCommon:9.0.0'
implementation 'org.firstinspires.ftc:Vision:9.0.0'
implementation 'org.firstinspires.ftc:Inspection:9.1.0'
implementation 'org.firstinspires.ftc:Blocks:9.1.0'
implementation 'org.firstinspires.ftc:Tfod:9.1.0'
implementation 'org.firstinspires.ftc:RobotCore:9.1.0'
implementation 'org.firstinspires.ftc:RobotServer:9.1.0'
implementation 'org.firstinspires.ftc:OnBotJava:9.1.0'
implementation 'org.firstinspires.ftc:Hardware:9.1.0'
implementation 'org.firstinspires.ftc:FtcCommon:9.1.0'
implementation 'org.firstinspires.ftc:Vision:9.1.0'
implementation 'org.firstinspires.ftc:gameAssets-CenterStage:1.0.0'
implementation 'org.tensorflow:tensorflow-lite-task-vision:0.4.3'
runtimeOnly 'org.tensorflow:tensorflow-lite:2.12.0'