mirror of https://github.com/trc492/FtcTemplate.git (synced 2025-07-01 13:01:24 -07:00)
Updated to FTC SDK 10.1.
@@ -1,8 +1,8 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:tools="http://schemas.android.com/tools"
-    android:versionCode="55"
-    android:versionName="10.0">
+    android:versionCode="56"
+    android:versionName="10.1">

    <uses-permission android:name="android.permission.RECEIVE_BOOT_COMPLETED" />
@@ -58,6 +58,9 @@ import java.util.List;
 * When an AprilTag in the TagLibrary is detected, the SDK provides location and orientation of the robot, relative to the field origin.
 * This information is provided in the "robotPose" member of the returned "detection".
 *
 * To learn about the Field Coordinate System that is defined for FTC (and used by this OpMode), see the FTC-DOCS link below:
 * https://ftc-docs.firstinspires.org/en/latest/game_specific_resources/field_coordinate_system/field-coordinate-system.html
 *
 * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
 * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
 */
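
// A hedged sketch of reading that pose (assumes the sample's AprilTagDetection
// variable "detection" and the SDK's Pose3D type; this is not part of the diff above):
// Pose3D robotPose = detection.robotPose;
// telemetry.addData("Robot position",    robotPose.getPosition().toString());
// telemetry.addData("Robot orientation", robotPose.getOrientation().toString());
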
@@ -0,0 +1,191 @@
/*
 * Copyright (c) 2024 Phil Malone
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

package org.firstinspires.ftc.robotcontroller.external.samples;

import android.util.Size;

import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.util.SortOrder;

import org.firstinspires.ftc.robotcore.external.Telemetry;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.vision.VisionPortal;
import org.firstinspires.ftc.vision.opencv.ColorBlobLocatorProcessor;
import org.firstinspires.ftc.vision.opencv.ColorRange;
import org.firstinspires.ftc.vision.opencv.ImageRegion;
import org.opencv.core.RotatedRect;

import java.util.List;

/*
 * This OpMode illustrates how to use a video source (camera) to locate specifically colored regions.
 *
 * Unlike a "color sensor", which determines the color of an object in the field of view, this "color locator"
 * will search the Region of Interest (ROI) in a camera image and find any "blobs" of color that match the requested color range.
 * These blobs can be further filtered and sorted to find the one most likely to be the item the user is looking for.
 *
 * To perform this function, a VisionPortal runs a ColorBlobLocatorProcessor process.
 * The ColorBlobLocatorProcessor process is created first, and then the VisionPortal is built to use this process.
 * The ColorBlobLocatorProcessor analyzes the ROI and locates pixels that match the ColorRange to form a "mask".
 * The matching pixels are then collected into contiguous "blobs" of pixels. The outer boundary of each blob is called its "contour".
 * For each blob, the process then creates the smallest possible rectangle, "boxFit", that will fully encase the contour.
 * The user can then call getBlobs() to retrieve the list of Blobs, where each Blob contains the contour and the boxFit data.
 * Note: The default sort order for Blobs is ContourArea, in descending order, so the biggest contours are listed first.
 *
 * To aid the user, a colored boxFit rectangle is drawn on the camera preview to show the location of each Blob.
 * The original Blob contour can also be added to the preview; this is helpful when configuring the ColorBlobLocatorProcessor parameters.
 *
 * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
 * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
 */

@Disabled
@TeleOp(name = "Concept: Vision Color-Locator", group = "Concept")
public class ConceptVisionColorLocator extends LinearOpMode
{
    @Override
    public void runOpMode()
    {
        /* Build a "Color Locator" vision processor based on the ColorBlobLocatorProcessor class.
         * - Specify the color range you are looking for. You can use a predefined color, or create your own color range:
         *       .setTargetColorRange(ColorRange.BLUE)                 // use a predefined color match
         *         Available predefined colors are: RED, BLUE, YELLOW, GREEN
         *       .setTargetColorRange(new ColorRange(ColorSpace.YCrCb, // or define your own color match
         *                                           new Scalar( 32, 176,   0),
         *                                           new Scalar(255, 255, 132)))
         *
         * - Focus the color locator by defining a Region of Interest (ROI) which you want to search.
         *   This can be the entire frame, or a sub-region defined using:
         *   1) standard image coordinates, or 2) a normalized +/- 1.0 coordinate system.
         *   Use one form of the ImageRegion class to define the ROI.
         *       ImageRegion.entireFrame()
         *       ImageRegion.asImageCoordinates(50, 50, 150, 150)            // 100x100 pixel square near the upper-left corner
         *       ImageRegion.asUnityCenterCoordinates(-0.5, 0.5, 0.5, -0.5)  // 50% width/height square centered on screen
         *
         * - Define which contours are included.
         *   You can get ALL the contours, or you can skip any contours that are completely inside another contour.
         *       .setContourMode(ColorBlobLocatorProcessor.ContourMode.ALL_FLATTENED_HIERARCHY) // return all contours
         *       .setContourMode(ColorBlobLocatorProcessor.ContourMode.EXTERNAL_ONLY)           // exclude contours inside other contours
         *   Note: EXTERNAL_ONLY helps to prevent bright reflection spots from breaking up areas of solid color.
         *
         * - Turn the display of contours ON or OFF. Turning this on helps with debugging but takes up valuable CPU time.
         *       .setDrawContours(true)
         *
         * - Include any pre-processing of the image or mask before looking for Blobs.
         *   There is some extra processing you can include to improve the formation of blobs. Using these features requires
         *   an understanding of how they may affect the final blobs. The "pixels" argument sets the NxN kernel size.
         *       .setBlurSize(int pixels)   Blurring an image helps to provide a smooth color transition between objects, and smoother contours.
         *                                  The higher the number of pixels, the more blurred the image becomes.
         *                                  Note: Even "pixels" values will be incremented to satisfy the "odd number" requirement.
         *                                  Blurring too much may hide smaller features. A "pixels" size of 5 is good for a 320x240 image.
         *       .setErodeSize(int pixels)  Erosion removes floating pixels and thin lines so that only substantive objects remain.
         *                                  Erosion can grow holes inside regions, and also shrink objects.
         *                                  "pixels" in the range of 2-4 are suitable for low-res images.
         *       .setDilateSize(int pixels) Dilation makes objects more visible by filling in small holes, making lines appear thicker,
         *                                  and making filled shapes appear larger. Dilation is useful for joining broken parts of an
         *                                  object, such as when removing noise from an image.
         *                                  "pixels" in the range of 2-4 are suitable for low-res images.
         */
        ColorBlobLocatorProcessor colorLocator = new ColorBlobLocatorProcessor.Builder()
                .setTargetColorRange(ColorRange.BLUE)                                 // use a predefined color match
                .setContourMode(ColorBlobLocatorProcessor.ContourMode.EXTERNAL_ONLY)  // exclude blobs inside blobs
                .setRoi(ImageRegion.asUnityCenterCoordinates(-0.5, 0.5, 0.5, -0.5))   // search central 1/4 of camera view
                .setDrawContours(true)                                                // show contours on the Stream Preview
                .setBlurSize(5)                                                       // smooth the transitions between different colors in the image
                .build();
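
        // A hedged variation of the builder above (the values here are illustrative
        // assumptions, not tuned settings): erosion and dilation, described in the
        // notes above, can be chained in to clean up the threshold mask before
        // contours are formed.
        // ColorBlobLocatorProcessor colorLocator = new ColorBlobLocatorProcessor.Builder()
        //         .setTargetColorRange(ColorRange.YELLOW)
        //         .setContourMode(ColorBlobLocatorProcessor.ContourMode.EXTERNAL_ONLY)
        //         .setRoi(ImageRegion.entireFrame())
        //         .setBlurSize(5)     // smooth color transitions first
        //         .setErodeSize(3)    // then remove stray pixels and thin lines
        //         .setDilateSize(3)   // then re-grow and solidify the remaining regions
        //         .build();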

        /*
         * Build a vision portal to run the Color Locator process.
         *
         * - Add the colorLocator process created above.
         * - Set the desired video resolution.
         *   Since a high resolution will not improve this process, choose a lower resolution that is
         *   supported by your camera. This will improve overall performance and reduce latency.
         * - Choose your video source. This may be:
         *       .setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"))  // for a webcam
         *   or
         *       .setCamera(BuiltinCameraDirection.BACK)                    // for a phone camera
         */
        VisionPortal portal = new VisionPortal.Builder()
                .addProcessor(colorLocator)
                .setCameraResolution(new Size(320, 240))
                .setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"))
                .build();

        telemetry.setMsTransmissionInterval(50);   // Speed up telemetry updates; just use for debugging.
        telemetry.setDisplayFormat(Telemetry.DisplayFormat.MONOSPACE);

        // WARNING: To be able to view the stream preview on the Driver Station, this code runs in INIT mode.
        while (opModeIsActive() || opModeInInit())
        {
            telemetry.addData("preview on/off", "... Camera Stream\n");

            // Read the current list of blobs.
            List<ColorBlobLocatorProcessor.Blob> blobs = colorLocator.getBlobs();

            /*
             * The list of Blobs can be filtered to remove unwanted Blobs.
             * Note: All contours will still be displayed on the Stream Preview, but only those that satisfy the filter
             * conditions will remain in the current list of "blobs". Multiple filters may be used.
             *
             * Use any of the following filters.
             *
             * ColorBlobLocatorProcessor.Util.filterByArea(minArea, maxArea, blobs);
             *   A Blob's area is the number of pixels contained within the Contour. Filter out any that are too big or small.
             *   Start with a large range and then refine the range based on the likely size of the desired object in the viewfinder.
             *
             * ColorBlobLocatorProcessor.Util.filterByDensity(minDensity, maxDensity, blobs);
             *   A blob's density is an indication of how "full" the contour is.
             *   If you put a rubber band around the contour you would get the "Convex Hull" of the contour.
             *   The density is the ratio of Contour-area to Convex-Hull-area.
             *
             * ColorBlobLocatorProcessor.Util.filterByAspectRatio(minAspect, maxAspect, blobs);
             *   A blob's aspect ratio is the ratio of the boxFit long side to the short side.
             *   A perfect square has an aspect ratio of 1. All others are > 1.
             */
            ColorBlobLocatorProcessor.Util.filterByArea(50, 20000, blobs);   // filter out very small blobs
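
            // As a further hedged illustration (these thresholds are assumptions, not
            // tuned values), the other filters listed above could also be applied here:
            // ColorBlobLocatorProcessor.Util.filterByDensity(0.7, 1.0, blobs);      // keep only "full" contours
            // ColorBlobLocatorProcessor.Util.filterByAspectRatio(1.0, 3.0, blobs);  // reject long, thin blobs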

            /*
             * The list of Blobs can be sorted using the same Blob attributes as listed above.
             * No more than one sort call should be made. Sorting can use ascending or descending order.
             *     ColorBlobLocatorProcessor.Util.sortByArea(SortOrder.DESCENDING, blobs);        // Default
             *     ColorBlobLocatorProcessor.Util.sortByDensity(SortOrder.DESCENDING, blobs);
             *     ColorBlobLocatorProcessor.Util.sortByAspectRatio(SortOrder.DESCENDING, blobs);
             */
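
            // One sort could be applied here as a hedged example (SortOrder is already
            // imported above; uncommenting replaces the default biggest-area-first order):
            // ColorBlobLocatorProcessor.Util.sortByDensity(SortOrder.DESCENDING, blobs);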

            telemetry.addLine(" Area Density Aspect  Center");

            // Display the size (area) and center location for each Blob.
            for (ColorBlobLocatorProcessor.Blob b : blobs)
            {
                RotatedRect boxFit = b.getBoxFit();
                telemetry.addLine(String.format("%5d  %4.2f   %5.2f  (%3d,%3d)",
                        b.getContourArea(), b.getDensity(), b.getAspectRatio(),
                        (int) boxFit.center.x, (int) boxFit.center.y));
            }

            telemetry.update();
            sleep(50);
        }
    }
}
@@ -0,0 +1,136 @@
/*
 * Copyright (c) 2024 Phil Malone
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

package org.firstinspires.ftc.robotcontroller.external.samples;

import android.graphics.Color;
import android.util.Size;

import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;

import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.vision.VisionPortal;
import org.firstinspires.ftc.vision.opencv.ImageRegion;
import org.firstinspires.ftc.vision.opencv.PredominantColorProcessor;

/*
 * This OpMode illustrates how to use a video source (camera) as a color sensor.
 *
 * A "color sensor" will typically determine the color of the object that it is pointed at.
 *
 * This sample performs the same function, except it uses a video camera to inspect an object or scene.
 * The user may choose to inspect all, or just a Region of Interest (ROI), of the active camera view.
 * The user must also provide a list of "acceptable colors" (Swatches) from which the closest matching color will be selected.
 *
 * To perform this function, a VisionPortal runs a PredominantColorProcessor process.
 * The PredominantColorProcessor process is created first, and then the VisionPortal is built to use this process.
 * The PredominantColorProcessor analyzes the ROI and splits the colored pixels into several color-clusters.
 * The largest of these clusters is then considered to be the "Predominant Color".
 * The process then matches the Predominant Color with the closest Swatch and returns that match.
 *
 * To aid the user, a colored rectangle is drawn on the camera preview to show the Region of Interest,
 * and the Predominant Color is used to paint the rectangle border, so the user can verify that the color is reasonable.
 *
 * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
 * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
 */

@Disabled
@TeleOp(name = "Concept: Vision Color-Sensor", group = "Concept")
public class ConceptVisionColorSensor extends LinearOpMode
{
    @Override
    public void runOpMode()
    {
        /* Build a "Color Sensor" vision processor based on the PredominantColorProcessor class.
         *
         * - Focus the color sensor by defining a Region of Interest (ROI) which you want to inspect.
         *   This can be the entire frame, or a sub-region defined using:
         *   1) standard image coordinates, or 2) a normalized +/- 1.0 coordinate system.
         *   Use one form of the ImageRegion class to define the ROI.
         *       ImageRegion.entireFrame()
         *       ImageRegion.asImageCoordinates(50, 50, 150, 150)            // 100x100 pixel square near the upper-left corner
         *       ImageRegion.asUnityCenterCoordinates(-0.1, 0.1, 0.1, -0.1)  // 10% width/height square centered on screen
         *
         * - Set the list of "acceptable" color swatches (matches).
         *   Only colors that you assign here will be returned.
         *   If you know the sensor will be pointing to one of a few specific colors, enter them here.
         *   Or, if the sensor may be pointed randomly, provide some additional colors that may match the surroundings.
         *   Possible choices are:
         *       RED, ORANGE, YELLOW, GREEN, CYAN, BLUE, PURPLE, MAGENTA, BLACK, WHITE
         *
         *   Note that in the example shown below, only some of the available colors are included.
         *   This will force any other colored region into one of these colors.
         *   e.g. Green may be reported as YELLOW, as this may be the "closest" match.
         */
        PredominantColorProcessor colorSensor = new PredominantColorProcessor.Builder()
                .setRoi(ImageRegion.asUnityCenterCoordinates(-0.1, 0.1, 0.1, -0.1))
                .setSwatches(
                        PredominantColorProcessor.Swatch.RED,
                        PredominantColorProcessor.Swatch.BLUE,
                        PredominantColorProcessor.Swatch.YELLOW,
                        PredominantColorProcessor.Swatch.BLACK,
                        PredominantColorProcessor.Swatch.WHITE)
                .build();

        /*
         * Build a vision portal to run the Color Sensor process.
         *
         * - Add the colorSensor process created above.
         * - Set the desired video resolution.
         *   Since a high resolution will not improve this process, choose a lower resolution that is
         *   supported by your camera. This will improve overall performance and reduce latency.
         * - Choose your video source. This may be:
         *       .setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"))  // for a webcam
         *   or
         *       .setCamera(BuiltinCameraDirection.BACK)                    // for a phone camera
         */
        VisionPortal portal = new VisionPortal.Builder()
                .addProcessor(colorSensor)
                .setCameraResolution(new Size(320, 240))
                .setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"))
                .build();

        telemetry.setMsTransmissionInterval(50);   // Speed up telemetry updates; just use for debugging.

        // WARNING: To be able to view the stream preview on the Driver Station, this code runs in INIT mode.
        while (opModeIsActive() || opModeInInit())
        {
            telemetry.addData("DS preview on/off", "3 dots, Camera Stream\n");

            // Request the most recent color analysis.
            // This will return the closest matching colorSwatch and the predominant RGB color.
            // Note: to take action based on the detected color, simply use the colorSwatch in a comparison or switch.
            // e.g.
            //   if (result.closestSwatch == PredominantColorProcessor.Swatch.RED) { ... some code ... }
            PredominantColorProcessor.Result result = colorSensor.getAnalysis();
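
            // A hedged sketch of acting on the result (the Swatch values are those
            // listed above; the actions are placeholders):
            // switch (result.closestSwatch)
            // {
            //     case RED:   /* respond to a red object  */ break;
            //     case BLUE:  /* respond to a blue object */ break;
            //     default:    /* no confident match       */ break;
            // }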

            // Display the Color Sensor result.
            telemetry.addData("Best Match:", result.closestSwatch);
            telemetry.addLine(String.format("R %3d, G %3d, B %3d", Color.red(result.rgb), Color.green(result.rgb), Color.blue(result.rgb)));
            telemetry.update();

            sleep(20);
        }
    }
}
README.md
@@ -264,7 +264,7 @@ In addition, we provide a large selection of sample OpModes (sample robot code)

## NOTICE

-This repository contains the public FTC SDK for the CENTERSTAGE (2023-2024) competition season.
+This repository contains the public FTC SDK for the INTO THE DEEP (2024-2025) competition season.

## Welcome!
This GitHub repository contains the source code that is used to build an Android app to control a *FIRST* Tech Challenge competition robot. To use this SDK, download/clone the entire project to your local computer.
@@ -323,6 +323,72 @@ The readme.md file located in the [/TeamCode/src/main/java/org/firstinspires/ftc

# Release Information

## Version 10.1 (20240919-122750)

### Enhancements
* Adds new OpenCV-based `VisionProcessor`s (which may be attached to a VisionPortal in either Java or Blocks) to help teams implement color processing via computer vision in the INTO THE DEEP game
  * `ColorBlobLocatorProcessor` implements OpenCV color "blob" detection. A new sample program, `ConceptVisionColorLocator`, demonstrates its use.
    * A choice is offered between predefined color ranges, or creating a custom one in RGB, HSV, or YCrCb color space
    * The ability is provided to restrict detection to a specified Region of Interest on the screen
    * Functions for applying erosion/dilation morphing to the threshold mask are provided
    * Functions for sorting and filtering the returned data are provided
  * `PredominantColorProcessor` allows using a region of the camera as a "long range color sensor" to determine the predominant color of that region. A new sample program, `ConceptVisionColorSensor`, demonstrates its use.
    * The determined predominant color is selected from a discrete set of color "swatches", similar to the MINDSTORMS NXT color sensor
  * Documentation on this Color Processing feature can be found here: https://ftc-docs.firstinspires.org/color-processing
* Added Blocks sample programs for color sensors: RobotAutoDriveToLine and SensorColor.
* Updated Self-Inspect to identify mismatched RC/DS software versions as a "caution" rather than a "failure."

### Bug Fixes
* Fixes [AngularVelocity conversion regression](https://github.com/FIRST-Tech-Challenge/FtcRobotController/issues/1070)

## Version 10.0 (20240828-111152)

### Breaking Changes
* Java classes and Blocks for TensorFlow Object Detection have been removed.
* `AngularVelocity.unit`, which was of type `AngleUnit`, has been renamed to `AngularVelocity.angleUnit` of type `UnnormalizedAngleUnit`

### Enhancements
* Sample for REV Digital Indicator has been added - ConceptRevLED
* Adds support for the [Sparkfun QWIIC LED Stick](https://www.sparkfun.com/products/18354)
  * To connect it directly, you need this [cable](https://www.sparkfun.com/products/25596)
* Adds ConceptLEDStick OpMode
* Adds Blocks for colors black, blue, cyan, dkgray, gray, green, ltgray, magenta, red, white, and yellow.
* Adds an "evaluate but ignore result" Block that executes the connected block and ignores the result. Allows you to call a function and ignore the return value.
* Adds I2C driver for Maxbotix Maxsonar I2CXL sonar rangefinder
* Adds Blocks for setPwmEnable, setPwmDisable, and isPwmEnabled for servos and CR servos.
* In the Blocks editor: a \n in the ExportToBlocks annotation's comment field is displayed as a line break.
* Telemetry has new method setNumDecimalPlaces
* Telemetry now formats doubles and floats (not inside objects, just by themselves)
* Adds support for the Limelight 3A.
* Adds initial support for the REV Servo Hub
  * Both the Robot Controller and Driver Station need to be updated to version 10.0 in order for Servo Hubs to be configurable as Servo Hubs. If the app on either device is outdated, the Servo Hub will show up as an Expansion Hub, and some functionality will not work as expected. You should wait to create a configuration that includes a Servo Hub until both the Driver Station and Robot Controller apps have been updated to version 10.0.
  * Updating the Servo Hub's firmware and changing its address can only be done using the REV Hardware Client at this time
* Adds support for the REV 9-Axis IMU (REV-31-3332)
  * The REV 9-Axis IMU is only supported by the [Universal IMU interface](https://ftc-docs.firstinspires.org/en/latest/programming_resources/imu/imu.html)
  * Adds `Rev9AxisImuOrientationOnRobot` Java class.
  * If you mentally substitute this IMU's I2C port for the Control Hub's USB ports, `RevHubOrientationOnRobot` is also compatible with this sensor
  * Adds Blocks for Rev9AxisImuOrientationOnRobot, including Rev9AxisImuOrientationOnRobot.xyzOrientation and Rev9AxisImuOrientationOnRobot.zyxOrientation.
  * Adds Blocks samples SensorRev9AxisIMUOrthogonal and SensorRev9AxisIMUNonOrthogonal.
* Improves Blocks support for RevHubImuOrientationOnRobot.
  * Adds Blocks for RevHubImuOrientationOnRobot.xyzOrientation and RevHubImuOrientationOnRobot.zyxOrientation.
  * Adds Blocks samples SensorHubIMUOrthogonal (replaces SensorIMU) and SensorHubIMUNonOrthogonal.
* Updates EasyOpenCV, AprilTag, OpenCV, and `libjpeg-turbo` versions
* Adds Blocks for max and min that take two numbers.
* Adds Blocks OpModes ConceptRevSPARKMini, RobotAutoDriveByEncoder, RobotAutoDriveByGyro, RobotAutoDriveByTime, RobotAutoDriveToAprilTagOmni, and RobotAutoDriveToAprilTagTank.
* Two OpModes with the same name now automatically get renamed, with the name followed by a "-" and the class name, allowing them both to be on the device.
* Shows the name of the active configuration on the Manage page of the Robot Controller Console
* Updated AprilTag Library for INTO THE DEEP. Notably, `getCurrentGameTagLibrary()` now returns INTO THE DEEP tags.
* Adds Blocks for Telemetry.setMsTransmissionInterval and Telemetry.getMsTransmissionInterval.
* Adds Blocks sample SensorOctoQuad.

### Bug Fixes
* Fixes a bug where the RevBlinkinLedDriver Blocks were under Actuators in the Blocks editor toolbox. They are now under Other Devices.
* Fixes a bug where `Exception`s thrown in user code after a stop was requested by the Driver Station would be silently eaten
* Fixed a bug where, if you asked for `AngularVelocity` in a unit different than the device reported it in, it would normalize it between -PI and PI for radians, and -180 and 180 for degrees.

## Version 9.2 (20240701-085519)

### Important Notes
@@ -4,14 +4,14 @@ repositories {
}

dependencies {
-    implementation 'org.firstinspires.ftc:Inspection:10.0.0'
-    implementation 'org.firstinspires.ftc:Blocks:10.0.0'
-    implementation 'org.firstinspires.ftc:RobotCore:10.0.0'
-    implementation 'org.firstinspires.ftc:RobotServer:10.0.0'
-    implementation 'org.firstinspires.ftc:OnBotJava:10.0.0'
-    implementation 'org.firstinspires.ftc:Hardware:10.0.0'
-    implementation 'org.firstinspires.ftc:FtcCommon:10.0.0'
-    implementation 'org.firstinspires.ftc:Vision:10.0.0'
+    implementation 'org.firstinspires.ftc:Inspection:10.1.0'
+    implementation 'org.firstinspires.ftc:Blocks:10.1.0'
+    implementation 'org.firstinspires.ftc:RobotCore:10.1.0'
+    implementation 'org.firstinspires.ftc:RobotServer:10.1.0'
+    implementation 'org.firstinspires.ftc:OnBotJava:10.1.0'
+    implementation 'org.firstinspires.ftc:Hardware:10.1.0'
+    implementation 'org.firstinspires.ftc:FtcCommon:10.1.0'
+    implementation 'org.firstinspires.ftc:Vision:10.1.0'
     implementation 'androidx.appcompat:appcompat:1.2.0'
}