diff --git a/.github/workflows/static.yml b/.github/workflows/static.yml new file mode 100644 index 0000000..0ba8230 --- /dev/null +++ b/.github/workflows/static.yml @@ -0,0 +1,43 @@ +# Simple workflow for deploying static content to GitHub Pages +name: Deploy static content to Pages + +on: + # Runs on pushes targeting the default branch + push: + branches: ["master"] + + # Allows you to run this workflow manually from the Actions tab + workflow_dispatch: + +# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages +permissions: + contents: read + pages: write + id-token: write + +# Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued. +# However, do NOT cancel in-progress runs as we want to allow these production deployments to complete. +concurrency: + group: "pages" + cancel-in-progress: false + +jobs: + # Single deploy job since we're just deploying + deploy: + environment: + name: github-pages + url: ${{ steps.deployment.outputs.page_url }} + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Setup Pages + uses: actions/configure-pages@v5 + - name: Upload artifact + uses: actions/upload-pages-artifact@v3 + with: + # Upload entire repository + path: '.' + - name: Deploy to GitHub Pages + id: deployment + uses: actions/deploy-pages@v4 diff --git a/FtcRobotController/build.gradle b/FtcRobotController/build.gradle index e25651f..9fa2169 100644 --- a/FtcRobotController/build.gradle +++ b/FtcRobotController/build.gradle @@ -14,7 +14,11 @@ android { buildConfigField "String", "APP_BUILD_TIME", '"' + (new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ", Locale.ROOT).format(new Date())) + '"' } - compileSdkVersion 29 + buildFeatures { + buildConfig = true + } + + compileSdkVersion 30 compileOptions { sourceCompatibility JavaVersion.VERSION_1_8 diff --git a/FtcRobotController/src/main/AndroidManifest.xml b/FtcRobotController/src/main/AndroidManifest.xml index 3b87d9d..1ce6a3e 100644 --- a/FtcRobotController/src/main/AndroidManifest.xml +++ b/FtcRobotController/src/main/AndroidManifest.xml @@ -1,8 +1,8 @@ + android:versionCode="57" + android:versionName="10.1.1"> diff --git a/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptAprilTagLocalization.java b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptAprilTagLocalization.java index 05318ba..d90261e 100644 --- a/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptAprilTagLocalization.java +++ b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptAprilTagLocalization.java @@ -58,6 +58,9 @@ import java.util.List; * When an AprilTag in the TagLibrary is detected, the SDK provides location and orientation of the robot, relative to the field origin. * This information is provided in the "robotPose" member of the returned "detection". * + * To learn about the Field Coordinate System that is defined for FTC (and used by this OpMode), see the FTC-DOCS link below: + * https://ftc-docs.firstinspires.org/en/latest/game_specific_resources/field_coordinate_system/field-coordinate-system.html + * * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list. 
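 *
 * A rough illustrative sketch (not part of this sample's code; it assumes the processor is stored in a
 * variable named "aprilTag" and that robotPose provides the SDK's Pose3D position accessors):
 *
 *    for (AprilTagDetection detection : aprilTag.getDetections()) {
 *        if (detection.metadata != null) {   // only tags found in the TagLibrary carry full pose data
 *            telemetry.addData("Robot XYZ", "%.1f %.1f %.1f",
 *                detection.robotPose.getPosition().x,
 *                detection.robotPose.getPosition().y,
 *                detection.robotPose.getPosition().z);
 *        }
 *    }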
*/ diff --git a/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVisionColorLocator.java b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVisionColorLocator.java new file mode 100644 index 0000000..987694d --- /dev/null +++ b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVisionColorLocator.java @@ -0,0 +1,191 @@ +/* + * Copyright (c) 2024 Phil Malone + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import android.util.Size; + +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; +import com.qualcomm.robotcore.util.SortOrder; + +import org.firstinspires.ftc.robotcore.external.Telemetry; +import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName; +import org.firstinspires.ftc.vision.VisionPortal; +import org.firstinspires.ftc.vision.opencv.ColorBlobLocatorProcessor; +import org.firstinspires.ftc.vision.opencv.ColorRange; +import org.firstinspires.ftc.vision.opencv.ImageRegion; +import org.opencv.core.RotatedRect; + +import java.util.List; + +/* + * This OpMode illustrates how to use a video source (camera) to locate specifically colored regions + * + * Unlike a "color sensor" which determines the color of an object in the field of view, this "color locator" + * will search the Region Of Interest (ROI) in a camera image, and find any "blobs" of color that match the requested color range. + * These blobs can be further filtered and sorted to find the one most likely to be the item the user is looking for. + * + * To perform this function, a VisionPortal runs a ColorBlobLocatorProcessor process. + * The ColorBlobLocatorProcessor process is created first, and then the VisionPortal is built to use this process. + * The ColorBlobLocatorProcessor analyses the ROI and locates pixels that match the ColorRange to form a "mask". + * The matching pixels are then collected into contiguous "blobs" of pixels. The outer boundaries of these blobs are called its "contour". + * For each blob, the process then creates the smallest possible rectangle "boxFit" that will fully encase the contour. + * The user can then call getBlobs() to retrieve the list of Blobs, where each Blob contains the contour and the boxFit data. 
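+ *
+ * As a brief illustrative sketch (the runOpMode() body later in this file is the authoritative example), that
+ * call might be used roughly like this, where colorLocator is the ColorBlobLocatorProcessor built in runOpMode():
+ *    List<ColorBlobLocatorProcessor.Blob> blobs = colorLocator.getBlobs();
+ *    if (!blobs.isEmpty()) {
+ *        RotatedRect box = blobs.get(0).getBoxFit();   // boxFit of the first blob in the list
+ *    }
+ *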
+ * Note: The default sort order for Blobs is ContourArea, in descending order, so the biggest contours are listed first. + * + * To aid the user, a colored boxFit rectangle is drawn on the camera preview to show the location of each Blob. + * The original Blob contour can also be added to the preview. This is helpful when configuring the ColorBlobLocatorProcessor parameters. + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. + * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list. + */ + +@Disabled +@TeleOp(name = "Concept: Vision Color-Locator", group = "Concept") +public class ConceptVisionColorLocator extends LinearOpMode +{ + @Override + public void runOpMode() + { + /* Build a "Color Locator" vision processor based on the ColorBlobLocatorProcessor class. + * - Specify the color range you are looking for. You can use a predefined color, or create your own color range + * .setTargetColorRange(ColorRange.BLUE) // use a predefined color match + * Available predefined colors are: RED, BLUE, YELLOW, GREEN + * .setTargetColorRange(new ColorRange(ColorSpace.YCrCb, // or define your own color match + * new Scalar( 32, 176, 0), + * new Scalar(255, 255, 132))) + * + * - Focus the color locator by defining a RegionOfInterest (ROI) which you want to search. + * This can be the entire frame, or a sub-region defined using: + * 1) standard image coordinates or 2) a normalized +/- 1.0 coordinate system. + * Use one form of the ImageRegion class to define the ROI. + * ImageRegion.entireFrame() + * ImageRegion.asImageCoordinates(50, 50, 150, 150) 100x100 pixel square near the upper left corner + * ImageRegion.asUnityCenterCoordinates(-0.5, 0.5, 0.5, -0.5) 50% width/height square centered on screen + * + * - Define which contours are included. + * You can get ALL the contours, or you can skip any contours that are completely inside another contour. + * .setContourMode(ColorBlobLocatorProcessor.ContourMode.ALL_FLATTENED_HIERARCHY) // return all contours + * .setContourMode(ColorBlobLocatorProcessor.ContourMode.EXTERNAL_ONLY) // exclude contours inside other contours + * note: EXTERNAL_ONLY helps to avoid bright reflection spots from breaking up areas of solid color. + * + * - Turn the display of contours ON or OFF. Turning this on helps debugging but takes up valuable CPU time. + * .setDrawContours(true) + * + * - Include any pre-processing of the image or mask before looking for Blobs. + * There is some extra processing you can include to improve the formation of blobs. Using these features requires + * an understanding of how they may affect the final blobs. The "pixels" argument sets the NxN kernel size. + * .setBlurSize(int pixels) Blurring an image helps to provide a smooth color transition between objects, and smoother contours. + * The higher the number of pixels, the more blurred the image becomes. + * Note: Even "pixels" values will be incremented to satisfy the "odd number" requirement. + * Blurring too much may hide smaller features. A "pixels" size of 5 is good for a 320x240 image. + * .setErodeSize(int pixels) Erosion removes floating pixels and thin lines so that only substantive objects remain. + * Erosion can grow holes inside regions, and also shrink objects. + * "pixels" in the range of 2-4 are suitable for low res images. + * .setDilateSize(int pixels) Dilation makes objects more visible by filling in small holes, making lines appear thicker, + * and making filled shapes appear larger.
Dilation is useful for joining broken parts of an + * object, such as when removing noise from an image. + * "pixels" in the range of 2-4 are suitable for low res images. + */ + ColorBlobLocatorProcessor colorLocator = new ColorBlobLocatorProcessor.Builder() + .setTargetColorRange(ColorRange.BLUE) // use a predefined color match + .setContourMode(ColorBlobLocatorProcessor.ContourMode.EXTERNAL_ONLY) // exclude blobs inside blobs + .setRoi(ImageRegion.asUnityCenterCoordinates(-0.5, 0.5, 0.5, -0.5)) // search central 1/4 of camera view + .setDrawContours(true) // Show contours on the Stream Preview + .setBlurSize(5) // Smooth the transitions between different colors in image + .build(); + + /* + * Build a vision portal to run the Color Locator process. + * + * - Add the colorLocator process created above. + * - Set the desired video resolution. + * Since a high resolution will not improve this process, choose a lower resolution that is + * supported by your camera. This will improve overall performance and reduce latency. + * - Choose your video source. This may be + * .setCamera(hardwareMap.get(WebcamName.class, "Webcam 1")) ..... for a webcam + * or + * .setCamera(BuiltinCameraDirection.BACK) ... for a Phone Camera + */ + VisionPortal portal = new VisionPortal.Builder() + .addProcessor(colorLocator) + .setCameraResolution(new Size(320, 240)) + .setCamera(hardwareMap.get(WebcamName.class, "Webcam 1")) + .build(); + + telemetry.setMsTransmissionInterval(50); // Speed up telemetry updates; just use for debugging. + telemetry.setDisplayFormat(Telemetry.DisplayFormat.MONOSPACE); + + // WARNING: To be able to view the stream preview on the Driver Station, this code runs in INIT mode. + while (opModeIsActive() || opModeInInit()) + { + telemetry.addData("preview on/off", "... Camera Stream\n"); + + // Read the current list of blobs. + List<ColorBlobLocatorProcessor.Blob> blobs = colorLocator.getBlobs(); + + /* + * The list of Blobs can be filtered to remove unwanted Blobs. + * Note: All contours will still be displayed on the Stream Preview, but only those that satisfy the filter + * conditions will remain in the current list of "blobs". Multiple filters may be used. + * + * Use any of the following filters. + * + * ColorBlobLocatorProcessor.Util.filterByArea(minArea, maxArea, blobs); + * A Blob's area is the number of pixels contained within the Contour. Filter out any that are too big or small. + * Start with a large range and then refine the range based on the likely size of the desired object in the viewfinder. + * + * ColorBlobLocatorProcessor.Util.filterByDensity(minDensity, maxDensity, blobs); + * A blob's density is an indication of how "full" the contour is. + * If you put a rubber band around the contour you would get the "Convex Hull" of the contour. + * The density is the ratio of Contour-area to Convex Hull-area. + * + * ColorBlobLocatorProcessor.Util.filterByAspectRatio(minAspect, maxAspect, blobs); + * A blob's Aspect ratio is the ratio of boxFit long side to short side. + * A perfect square has an aspect ratio of 1. All others are > 1. + */ + ColorBlobLocatorProcessor.Util.filterByArea(50, 20000, blobs); // filter out very small blobs. + + /* + * The list of Blobs can be sorted using the same Blob attributes as listed above. + * No more than one sort call should be made. Sorting can use ascending or descending order.
+ * ColorBlobLocatorProcessor.Util.sortByArea(SortOrder.DESCENDING, blobs); // Default + * ColorBlobLocatorProcessor.Util.sortByDensity(SortOrder.DESCENDING, blobs); + * ColorBlobLocatorProcessor.Util.sortByAspectRatio(SortOrder.DESCENDING, blobs); + */ + + telemetry.addLine(" Area Density Aspect Center"); + + // Display the size (area) and center location for each Blob. + for(ColorBlobLocatorProcessor.Blob b : blobs) + { + RotatedRect boxFit = b.getBoxFit(); + telemetry.addLine(String.format("%5d %4.2f %5.2f (%3d,%3d)", + b.getContourArea(), b.getDensity(), b.getAspectRatio(), (int) boxFit.center.x, (int) boxFit.center.y)); + } + + telemetry.update(); + sleep(50); + } + } +} diff --git a/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVisionColorSensor.java b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVisionColorSensor.java new file mode 100644 index 0000000..6be2bc4 --- /dev/null +++ b/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptVisionColorSensor.java @@ -0,0 +1,136 @@ +/* + * Copyright (c) 2024 Phil Malone + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package org.firstinspires.ftc.robotcontroller.external.samples; + +import android.graphics.Color; +import android.util.Size; + +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; + +import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName; +import org.firstinspires.ftc.vision.VisionPortal; +import org.firstinspires.ftc.vision.opencv.ImageRegion; +import org.firstinspires.ftc.vision.opencv.PredominantColorProcessor; + +/* + * This OpMode illustrates how to use a video source (camera) as a color sensor + * + * A "color sensor" will typically determine the color of the object that it is pointed at. + * + * This sample performs the same function, except it uses a video camera to inspect an object or scene. + * The user may choose to inspect all, or just a Region of Interest (ROI), of the active camera view. + * The user must also provide a list of "acceptable colors" (Swatches) from which the closest matching color will be selected. + * + * To perform this function, a VisionPortal runs a PredominantColorProcessor process. 
+ * The PredominantColorProcessor process is created first, and then the VisionPortal is built to use this process. + * The PredominantColorProcessor analyses the ROI and splits the colored pixels into several color-clusters. + * The largest of these clusters is then considered to be the "Predominant Color" + * The process then matches the Predominant Color with the closest Swatch and returns that match. + * + * To aid the user, a colored rectangle is drawn on the camera preview to show the RegionOfInterest, + * The Predominant Color is used to paint the rectangle border, so the user can verify that the color is reasonable. + * + * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name. + * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list + */ + +@Disabled +@TeleOp(name = "Concept: Vision Color-Sensor", group = "Concept") +public class ConceptVisionColorSensor extends LinearOpMode +{ + @Override + public void runOpMode() + { + /* Build a "Color Sensor" vision processor based on the PredominantColorProcessor class. + * + * - Focus the color sensor by defining a RegionOfInterest (ROI) which you want to inspect. + * This can be the entire frame, or a sub-region defined using: + * 1) standard image coordinates or 2) a normalized +/- 1.0 coordinate system. + * Use one form of the ImageRegion class to define the ROI. + * ImageRegion.entireFrame() + * ImageRegion.asImageCoordinates(50, 50, 150, 150) 100x100 pixel square near the upper left corner + * ImageRegion.asUnityCenterCoordinates(-0.1, 0.1, 0.1, -0.1) 10% width/height square centered on screen + * + * - Set the list of "acceptable" color swatches (matches). + * Only colors that you assign here will be returned. + * If you know the sensor will be pointing to one of a few specific colors, enter them here. + * Or, if the sensor may be pointed randomly, provide some additional colors that may match the surrounding. + * Possible choices are: + * RED, ORANGE, YELLOW, GREEN, CYAN, BLUE, PURPLE, MAGENTA, BLACK, WHITE; + * + * Note that in the example shown below, only some of the available colors are included. + * This will force any other colored region into one of these colors. + * eg: Green may be reported as YELLOW, as this may be the "closest" match. + */ + PredominantColorProcessor colorSensor = new PredominantColorProcessor.Builder() + .setRoi(ImageRegion.asUnityCenterCoordinates(-0.1, 0.1, 0.1, -0.1)) + .setSwatches( + PredominantColorProcessor.Swatch.RED, + PredominantColorProcessor.Swatch.BLUE, + PredominantColorProcessor.Swatch.YELLOW, + PredominantColorProcessor.Swatch.BLACK, + PredominantColorProcessor.Swatch.WHITE) + .build(); + + /* + * Build a vision portal to run the Color Sensor process. + * + * - Add the colorSensor process created above. + * - Set the desired video resolution. + * Since a high resolution will not improve this process, choose a lower resolution that is + * supported by your camera. This will improve overall performance and reduce latency. + * - Choose your video source. This may be + * .setCamera(hardwareMap.get(WebcamName.class, "Webcam 1")) ..... for a webcam + * or + * .setCamera(BuiltinCameraDirection.BACK) ... 
for a Phone Camera + */ + VisionPortal portal = new VisionPortal.Builder() + .addProcessor(colorSensor) + .setCameraResolution(new Size(320, 240)) + .setCamera(hardwareMap.get(WebcamName.class, "Webcam 1")) + .build(); + + telemetry.setMsTransmissionInterval(50); // Speed up telemetry updates, Just use for debugging. + + // WARNING: To be able to view the stream preview on the Driver Station, this code runs in INIT mode. + while (opModeIsActive() || opModeInInit()) + { + telemetry.addData("DS preview on/off", "3 dots, Camera Stream\n"); + + // Request the most recent color analysis. + // This will return the closest matching colorSwatch and the predominant RGB color. + // Note: to take actions based on the detected color, simply use the colorSwatch in a comparison or switch. + // eg: + // if (result.closestSwatch == PredominantColorProcessor.Swatch.RED) {... some code ...} + PredominantColorProcessor.Result result = colorSensor.getAnalysis(); + + // Display the Color Sensor result. + telemetry.addData("Best Match:", result.closestSwatch); + telemetry.addLine(String.format("R %3d, G %3d, B %3d", Color.red(result.rgb), Color.green(result.rgb), Color.blue(result.rgb))); + telemetry.update(); + + sleep(20); + } + } +} diff --git a/README.md b/README.md index 5353afb..164bcb3 100644 --- a/README.md +++ b/README.md @@ -1,1553 +1,11 @@ -## NOTICE +# Welcome to the Offical Pedro Pathing Quickstart! -This repository contains the public FTC SDK for the INTO THE DEEP (2024-2025) competition season. +To begin, clone this repo and then open it in Android Studio. -## Welcome! -This GitHub repository contains the source code that is used to build an Android app to control a *FIRST* Tech Challenge competition robot. To use this SDK, download/clone the entire project to your local computer. +Then, follow the steps on our [website](https://pedropathing.com/)! -## Requirements -To use this Android Studio project, you will need Android Studio 2021.2 (codename Chipmunk) or later. +For Path Generation, you can use this [path generator](https://discord.gg/2GfC4qBP5s). -To program your robot in Blocks or OnBot Java, you do not need Android Studio. +--- -## Getting Started -If you are new to robotics or new to the *FIRST* Tech Challenge, then you should consider reviewing the [FTC Blocks Tutorial](https://ftc-docs.firstinspires.org/programming_resources/blocks/Blocks-Tutorial.html) to get familiar with how to use the control system: - -      [FTC Blocks Online Tutorial](https://ftc-docs.firstinspires.org/programming_resources/blocks/Blocks-Tutorial.html) - -Even if you are an advanced Java programmer, it is helpful to start with the [FTC Blocks tutorial](https://ftc-docs.firstinspires.org/programming_resources/blocks/Blocks-Tutorial.html), and then migrate to the [OnBot Java Tool](https://ftc-docs.firstinspires.org/programming_resources/onbot_java/OnBot-Java-Tutorial.html) or to [Android Studio](https://ftc-docs.firstinspires.org/programming_resources/android_studio_java/Android-Studio-Tutorial.html) afterwards. - -## Downloading the Project -If you are an Android Studio programmer, there are several ways to download this repo. Note that if you use the Blocks or OnBot Java Tool to program your robot, then you do not need to download this repository. - -* If you are a git user, you can clone the most current version of the repository: - -

            git clone https://github.com/FIRST-Tech-Challenge/FtcRobotController.git

- -* Or, if you prefer, you can use the "Download Zip" button available through the main repository page. Downloading the project as a .ZIP file will keep the size of the download manageable. - -* You can also download the project folder (as a .zip or .tar.gz archive file) from the Downloads subsection of the [Releases](https://github.com/FIRST-Tech-Challenge/FtcRobotController/releases) page for this repository. - -* The Releases page also contains prebuilt APKs. - -Once you have downloaded and uncompressed (if needed) your folder, you can use Android Studio to import the folder ("Import project (Eclipse ADT, Gradle, etc.)"). - -## Getting Help -### User Documentation and Tutorials -*FIRST* maintains online documentation with information and tutorials on how to use the *FIRST* Tech Challenge software and robot control system. You can access this documentation using the following link: - -      [FIRST Tech Challenge Documentation](https://ftc-docs.firstinspires.org/index.html) - -Note that the online documentation is an "evergreen" document that is constantly being updated and edited. It contains the most current information about the *FIRST* Tech Challenge software and control system. - -### Javadoc Reference Material -The Javadoc reference documentation for the FTC SDK is now available online. Click on the following link to view the FTC SDK Javadoc documentation as a live website: - -      [FTC Javadoc Documentation](https://javadoc.io/doc/org.firstinspires.ftc) - -### Online User Forum -For technical questions regarding the Control System or the FTC SDK, please visit the FIRST Tech Challenge Community site: - -      [FIRST Tech Challenge Community](https://ftc-community.firstinspires.org/) - -### Sample OpModes -This project contains a large selection of Sample OpModes (robot code examples) which can be cut and pasted into your /teamcode folder to be used as-is, or modified to suit your team's needs. - -Samples Folder:    [/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples](FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples) - -The readme.md file located in the [/TeamCode/src/main/java/org/firstinspires/ftc/teamcode](TeamCode/src/main/java/org/firstinspires/ftc/teamcode) folder contains an explanation of the sample naming convention, and instructions on how to copy them to your own project space. - -# Release Information - -## Version 10.0 (20240828-111152) - -### Breaking Changes -* Java classes and Blocks for TensorFlow Object Detection have been removed. -* `AngularVelocity.unit` which was of type `AngleUnit` has been renamed `AngularVelocity.angleUnit` of type `UnnormalizedAngleUnit` - -### Enhancements -* Sample for REV Digital Indicator has been added - ConceptRevLED -* Adds support for the [Sparkfun QWIIC LED Stick](https://www.sparkfun.com/products/18354) - * To connect it directly, you need this [cable](https://www.sparkfun.com/products/25596) -* Adds ConceptLEDStick OpMode -* Adds Blocks for colors black, blue, cyan, dkgray, gray, green, ltgray, magenta, red, white, and yellow. -* Adds an "evaluate but ignore result" Block that executes the connected block and ignores the result. Allows you to call a function and ignore the return value. -* Adds I2C driver for Maxbotix Maxsonar I2CXL sonar rangefinder -* Adds Blocks for setPwmEnable, setPwmDisable, and isPwmEnabled for servos and CR servos. -* In the Blocks editor: a \n in the ExportToBlocks annotation's comment field is displayed as a line break. 
-* Telemetry has new method setNumDecimalPlaces -* Telemetry now formats doubles and floats (not inside objects, just by themselves) -* Adds support for the Limelight 3A. -* Adds initial support for the REV Servo Hub - * Both the Robot Controller and Driver Station need to be updated to version 10.0 in order for Servo Hubs to be - configurable as Servo Hubs. If the app on either device is outdated, the Servo Hub will show up as an Expansion Hub, - and some functionality will not work as expected. You should wait to create a configuration that includes a Servo Hub - until both the Driver Station and Robot Controller apps have been updated to version 10.0. - * Updating the Servo Hub's firmware and changing its address can only be done using the REV Hardware Client at this time -* Adds support for the REV 9-Axis IMU (REV-31-3332) - * The REV 9-Axis IMU is only supported by the [Universal IMU interface](https://ftc-docs.firstinspires.org/en/latest/programming_resources/imu/imu.html) - * Adds `Rev9AxisImuOrientationOnRobot` Java class. - * If you mentally substitute this IMU's I2C port for the Control Hub's USB ports, `RevHubOrientationOnRobot` is also compatible with this sensor - * Adds Blocks for Rev9AxisImuOrientationOnRobot, including RevHubImuOrientationOnRobot.xyzOrientation and RevHubImuOrientationOnRobot.zyxOrientation. - * Adds Blocks samples SensorRev9AxisIMUOrthogonal and SensorRev9AxisIMUNonOrthogonal. -* Improves Blocks support for RevHubImuOrientationOnRobot. - * Adds Blocks for RevHubImuOrientationOnRobot.xyzOrientation and RevHubImuOrientationOnRobot.zyxOrientation. - * Adds Blocks samples SensorHubIMUOrthogonal (replaces SensorIMU) and SensorHubIMUNonOrthogonal. -* Updates EasyOpenCV, AprilTag, OpenCV, and `libjpeg-turbo` versions -* Adds Blocks for max and min that take two numbers. -* Adds Blocks OpModes ConceptRevSPARKMini, RobotAutoDriveByEncoder, RobotAutoDriveByGyro, RobotAutoDriveByTime, RobotAutoDriveToAprilTagOmni, and RobotAutoDriveToAprilTagTank. -* Two OpModes with the same name now automatically get renamed with the name followed by a "-" and the class name allowing them to both be on the device. -* Shows the name of the active configuration on the Manage page of the Robot Controller Console -* Updated AprilTag Library for INTO THE DEEP. Notably, `getCurrentGameTagLibrary()` now returns INTO THE DEEP tags. -* Adds Blocks for Telemetry.setMsTransmissionInterval and Telemetry.getMsTransmissionInterval. -* Adds Blocks sample SensorOctoQuad. - -### Bug Fixes -* Fixes a bug where the RevBlinkinLedDriver Blocks were under Actuators in the Blocks editor toolbox. They are now Other Devices. -* Fixes a bug where `Exception`s thrown in user code after a stop was requested by the Driver Station would be silently eaten -* Fixed a bug where if you asked for `AngularVelocity` in a unit different than the device reported it in, it would normalize it between -PI and PI for radians, and -180 and 180 for degrees. - -## Version 9.2 (20240701-085519) - -### Important Notes -* Java classes and Blocks for TensorFlow Object Detection have been deprecated and will be removed in Version 10.0. -* The samples that use TensorFlow Object Detection have been removed. - -### Enhancements -* Adds explanatory text to failed items on the inspection activities. To view the explanatory text tap the red warning icon for a failed item. -* In the Blocks editor: added a new kind of variable set block that sets the variable and also returns the new value. 
-* Changes the way that camera controls behave for a SwitchableCamera. Now, each method (such as getExposure, getMinExposure, getMaxExposure, setExposure for ExposureControl) acts on the currently active camera. -* Adds support for the REV USB PS4 Compatible Gamepad (REV-31-2983) -* Adds ConceptAprilTagMultiPortal OpMode -* Adds support for OctoQuad Quadrature Encoder & Pulse Width Interface Module -* Adds the ExportAprilTagLibraryToBlocks annotation that indicates that a static method that returns an AprilTagLibrary is exported to the Blocks programming environment. The corresponding block will appear in the Blocks toolbox along with the built-in tag libraries. -* Adds Blocks OpMode ConceptAprilTagOptimizeExposure. -* Adds support for the SparkFun Optical Tracking Odometry sensor. - -### Bug Fixes -* Fixes https://github.com/FIRST-Tech-Challenge/FtcRobotController/issues/942 where visionPortal.close() can cause an IndexOutOfBoundsError. -* Fixes a bug in the blocks editor where collapsed function blocks show a warning "Collapsed blocks contain warnings." when the Blocks OpMode is reopened. -* Fixes a bug where the blocks editor wouldn't warn you that you have unsaved changes when you try to leave. This bug was introduced due to a behavior change in Chrome 119. -* [Issue #764](https://github.com/FIRST-Tech-Challenge/FtcRobotController/issues/764) - Get gain control returns a null pointer for a switchable camera -* Fixes a bug where the correct deadzone for certain gamepads was not applied when Advanced Gamepad Features was enabled - -## Version 9.1 (20240215-115542) - -### Enhancements -* Fixes a problem with Blocks: if the user closes a Block's warning balloon, it will still be closed next time the project is opened in the Blocks editor. -* In the Blocks editor, an alert concerning missing hardware devices is not shown if all the Blocks that use the missing hardware devices are disabled. -* Adds Blocks to support comparing property values CRServo.Direction, DCMotor.Direction, DCMotor.Mode, DCMotor.ZeroPowerBehavior, DigitalChannel.Mode, GyroSensor.HeadingMode, IrSeekerSensor.Mode, and Servo.Direction, to the corresponding enum Block. -* Improves OnBotJava auto-import to correctly import classes when used in certain situations. -* Improves OnBotJava autocomplete to provide better completion options in most cases. - * This fixes an issue where autocomplete would fail if a method with two or more formal parameters was defined. -* In OnBotJava, code folding support was added to expand and collapse code sections -* In OnBotJava, the copyright header is now automatically collapsed loading new files -* For all Blocks OpMode samples, intro comments have been moved to the RunOpMode comment balloon. -* The Clean up Blocks command in the Blocks editor now positions function Blocks so their comment balloons don't overlap other function Blocks. -* Added Blocks OpMode sample SensorTouch. -* Added Java OpMode sample SensorDigitalTouch. -* Several improvements to VisionPortal - * Adds option to control whether the stream is automatically started following a `.build()` call on a VisionPortal Builder - * Adds option to control whether the vision processing statistics overlay is rendered or not - * VisionPortals now implement the `CameraStreamSource` interface, allowing multiportal users to select which portal is routed to the DS in INIT by calling CameraStreamServer.getInstance().setSource(visionPortal). Can be selected via gamepad, between Camera Stream sessions. 
- * Add option to `AprilTagProcessor` to suppress calibration warnings - * Improves camera calibration warnings - * If a calibration is scaled, the resolution it was scaled from will be listed - * If calibrations exist with the wrong aspect ratio, the calibrated resolutions will be listed - * Fixes race condition which caused app crash when calling `stopStreaming()` immediately followed by `close()` on a VisionPortal - * Fixes IllegalStateException when calling `stopStreaming()` immediately after building a VisionPortal - * Added FTC Blocks counterparts to new Java methods: - * VisionPortal.Builder.setAutoStartStreamOnBuild - * VisionPortal.Builder.setShowStatsOverlay - * AprilTagProcessor.Builder.setSuppressCalibrationWarnings - * CameraStreamServer.setSource​ - -### Bug Fixes -* Fixes a problem where OnBotJava does not apply font size settings to the editor. -* Updates EasyOpenCV dependency to v1.7.1 - * Fixes inability to use EasyOpenCV CameraFactory in OnBotJava - * Fixes entire RC app crash when user pipeline throws an exception - * Fixes entire RC app crash when user user canvas annotator throws an exception - * Use the modern stacktrace display when handling user exceptions instead of the legacy ESTOP telemetry message - -## Version 9.0.1 (20230929-083754) - -### Enhancements -* Updates AprilTag samples to include Decimation and additional Comments. Also corrects misleading tag ID warnings -* Increases maximum size of Blocks inline comments to 140 characters -* Adds Blocks sample BasicOmniOpMode. -* Updated CENTERSTAGE library AprilTag orientation quaternions - * Thanks [@FromenActual](https://github.com/FromenActual) -* Updated Java Sample ConceptTensorFlowObjectDetection.java to include missing elements needed for custom model support. - -### Bug Fixes -* Fixes a problem where after October 1 the Driver Station will report as obsolete on v9.0 and prompt the user to update. - -## Version 9.0 (20230830-154348) - -### Breaking Changes -* Removes Vuforia -* Fields in `AprilTagDetection` and `AprilTagPose(ftc/raw)` objects are now `final` -* VisionPortal builder method `setCameraMonitorViewId()` has been renamed to `setLiveViewContainerId()` and `enableCameraMonitoring()` has been renamed to `enableLiveView()` - -### Enhancements -* Adds support for the DFRobot HuskyLens Vision Sensor. -* Blocks teams can now perform webcam calibration. - * Added a Block for System.currentTimeMillis (under Utilities/Time) - * Added a Block for VisionPortal.saveNextFrameRaw (under Vision/VisionPortal) - * Added a new sample Blocks OpMode called UtilityCameraFrameCapture. -* The RobotDriveByGyro sample has been updated to use the new universal IMU interface. It now supports both IMU types. -* Removed some error-prone ElapsedTime Blocks from the Blocks editor's toolbox. This is not a - breaking change: old Blocks OpModes that use these Blocks will still function, both in the - Blocks editor and at runtime. -* Standardizes on the form "OpMode" for the term OpMode. - * The preferred way to refer to OpModes that specifically extend `LinearOpMode` (including Blocks OpModes) is "linear OpMode". - * The preferred way to refer to OpModes that specifically extend `OpMode` directly is "iterative OpMode". -* Overhauls `OpMode` and `LinearOpMode` Javadoc comments to be easier to read and include more detail. 
-* Makes minor enhancements to Java samples - * Javadoc comments in samples that could be rendered badly in Android Studio have been converted to standard multi-line comments - * Consistency between samples has been improved - * The SensorDigitalTouch sample has been replaced with a new SensorTouch sample that uses the `TouchSensor` interface instead of `DigitalChannel`. - * The ConceptCompassCalibration, SensorMRCompass, and SensorMRIRSeeker samples have been deleted, as they are not useful for modern FTC competitions. - -### Bug Fixes -* Fixes a bug which prevented PlayStation gamepads from being used in bluetooth mode. Bluetooth is NOT legal for competition but may be useful to allow a DS device to be used while charging, or at an outreach event. -* Fixes a bug where a Blocks OpMode's Date Modified value can change to December 31, 1969, if the Control Hub is rebooted while the Blocks OpMode is being edited. -* Fixes the automatic TeleOp preselection feature (was broken in 8.2) -* Fixes a bug where passing an integer number such as 123 to the Telemetry.addData block that takes a number shows up as 123.0 in the telemetry. -* Fixes OnBotJava autocomplete issues: - * Autocomplete would incorrectly provide values for the current class when autocompleting a local variable - * `hardwareMap` autocomplete would incorrectly include lambda class entries -* Fixes OnBotJava not automatically importing classes. -* Fixes OnBotJava tabs failing to close when their file is deleted. -* Fixes a project view refresh not happening when a file is renamed in OnBotJava. -* Fixes the "Download" context menu item for external libraries in the OnBotJava interface. -* Fixes issue where Driver Station telemetry would intermittently freeze when set to Monospace mode. -* Fixes performance regression for certain REV Hub operations that was introduced in version 8.2. -* Fixes TagID comparison logic in DriveToTag samples. - -## Version 8.2 (20230707-131020) - -### Breaking Changes -* Non-linear (iterative) OpModes are no longer allowed to manipulate actuators in their `stop()` method. Attempts to do so will be ignored and logged. - * When an OpMode attempts to illegally manipulate an actuator, the Robot Controller will print a log message - including the text `CANCELLED_FOR_SAFETY`. - * Additionally, LinearOpModes are no longer able to regain the ability to manipulate actuators by removing their - thread's interrupt or using another thread. -* Removes support for Android version 6.0 (Marshmallow). The minSdkVersion is now 24. -* Increases the Robocol version. - * This means an 8.2 or later Robot Controller or Driver Station will not be able to communicate with an 8.1 or earlier Driver Station or Robot Controller. - * If you forget to update both apps at the same time, an error message will be shown explaining which app is older and should be updated. -* FTC_FieldCoordinateSystemDefinition.pdf has been moved. It is still in the git history, but has been removed from the git snapshot corresponding with the 8.2 tag. The official version now lives at [Field Coordinate System](https://ftc-docs.firstinspires.org/field-coordinate-system). -* `LynxUsbDevice.addConfiguredModule()` and `LynxUsbDevice.getConfiguredModule()` have been replaced with `LynxUsbDevice.getOrAddModule()`. -* Old Blocks for Vuforia and TensorFlow Object Detection are obsolete and have been removed from the - Blocks editor's toolbox. 
Existing Blocks OpModes that contain the old Blocks for Vuforia or - TensorFlow Object Detection can be opened in the Blocks editor, but running them will not work. - -### New features -* Adds new `VisionPortal` API for computer vision - * **This API may be subject to change for final kickoff release!** - * Several new samples added. - * Adds support for detecting AprilTags. - * `VisionPortal` is the new entry point for both AprilTag and TFOD processing. - * Vuforia will be removed in a future release. - * Updated TensorFlow dependencies. - * Added support for webcam camera controls to blocks. - * The Blocks editor's toolbox now has a Vision category, directly above the Utilities category. -* Related documentation for associated technologies can be found at - * [AprilTag Introduction](https://ftc-docs.firstinspires.org/apriltag-intro) - * [AprilTag SDK Guide](https://ftc-docs.firstinspires.org/apriltag-sdk) - * [AprilTag Detection Values](https://ftc-docs.firstinspires.org/apriltag-detection-values) - * [AprilTag Test Images](https://ftc-docs.firstinspires.org/apriltag-test-images) - * [Camera Calibration](https://ftc-docs.firstinspires.org/camera-calibration) -* Adds Driver Station support for Logitech Dual Action and Sony PS5 DualSense gamepads. - * This **does not** include support for the Sony PS5 DualSense Edge gamepad. - * Always refer to Game Manual 1 to determine gamepad legality in competition. -* Adds support for MJPEG payload streaming to UVC driver (external JPEG decompression routine required for use). -* Shows a hint on the Driver Station UI about how to bind a gamepad when buttons are pressed or the sticks are moved on an unbound gamepad. -* Adds option for fullscreening "Camera Stream" on Driver Station. -* OnBotJava source code is automatically saved as a ZIP file on every build with a rolling window of the last 30 builds kept; allows recovering source code from previous builds if code is accidentally deleted or corrupted. -* Adds support for changing the addresses of Expansion Hubs that are not connected directly via USB. - * The Expansion Hub Address Change screen now has an Apply button that changes the addresses without leaving the screen. - * Addresses that are assigned to other hubs connected to the same USB connection or Control Hub are no longer able to be selected. -* Increases maximum size of Blocks inline comments to 100 characters -* Saves position of open Blocks comment balloons -* Adds new AprilTag Driving samples: RobotDriveToAprilTagTank & RobotDriveToAprilTagOmni -* Adds Sample to illustrate optimizing camera exposure for AprilTags: ConceptAprilTagOptimizeExposure - -### Bug Fixes -* Corrects inspection screen to report app version using the SDK version defined in the libraries instead of the version specified in `AndroidManifest.xml`. This corrects the case where the app could show matching versions numbers to the user but still state that the versions did not match. - * If the version specified in `AndroidManifest.xml` does not match the SDK version, an SDK version entry will be displayed on the Manage webpage. -* Fixes no error being displayed when saving a configuration file with duplicate names from the Driver Station. -* Fixes a deadlock in the UVC driver which manifested in https://github.com/OpenFTC/EasyOpenCV/issues/57. -* Fixes a deadlock in the UVC driver that could occur when hot-plugging cameras. -* Fixes UVC driver compatibility with Arducam OV9281 global shutter camera. 
-* Fixes Emergency Stop condition when an OnBotJava build with duplicate OpMode names occurs. -* Fixes known causes of "Attempted use of a closed LynxModule instance" logspam. -* Fixes the visual identification LED pattern when configuring Expansion Hubs connected via RS-485. - -## Version 8.1.1 (20221201-150726) - -This is a bug fix only release to address the following four issues. - -* [Issue #492](https://github.com/FIRST-Tech-Challenge/FtcRobotController/issues/492) - Can't create new blocks opmodes. -* [Issue #495](https://github.com/FIRST-Tech-Challenge/FtcRobotController/issues/495) - Remove the final modifier from the OpMode's Telemetry object. -* [Issue #500](https://github.com/FIRST-Tech-Challenge/FtcRobotController/issues/500) - Some devices cannot be configured when the Driver Station app has been updated to 8.1 - * Updating either the Robot Controller app or the Driver Station app to 8.1.1 or later will fix this issue. -* The Modern Robotics touch sensor was configurable as a Digital Device. It can only be used as an Analog Device. - -## Version 8.1 (20221121-115119) - -### Breaking Changes -* Deprecates the `OpMode` fields `msStuckDetectInit`, `msStuckDetectInitLoop`, `msStuckDetectStart`, `msStuckDetectLoop`, and `msStuckDetectStop`. - * OpModes no longer have a time limit for `init()`, `init_loop()`, `start()` or `loop()`, so the fields corresponding to those methods are no longer used. - * `stop()` still has a time limit, but it is now hardcoded to be 1 second, and cannot be changed using `msStuckDetectStop`. -* Deprecates the `OpMode` methods `internalPreInit()`, `internalPostInitLoop()`, and `internalPostLoop()`. - * Iterative `OpMode`s will continue to call these methods in case they were overridden. - * These methods will not be called at all for `LinearOpMode`s. -* Deprecates (and stops respecting) `DeviceProperties.xmlTagAliases`. - -### Enhancements -* Adds a new `IMU` interface to Blocks and Java that can be used with both the original BNO055 IMU - included in all older Control Hubs and Expansion Hubs, and the new alternative BHI260AP IMU. - * You can determine which type of IMU is in your Control Hub by navigating to the Manage page of the web interface. - * To learn how to use the new `IMU` interface, see https://ftc-docs.firstinspires.org/programming_resources/imu/imu.html. The `SensorIMU` Blocks sample was also updated to use the new `IMU` interface, and the following Java samples were added: - * `SensorIMUOrthogonal` - * Use this sample if your REV Hub is mounted so that it is parallel or perpendicular to the - bottom of your robot. - * `SensorIMUNonOrthogonal` - * Use this sample if your REV Hub is mounted to your robot in any other orientation - * `ConceptExploringIMUOrientations` - * This OpMode is a tool to help you understand how the orthogonal orientations work, and - which one applies to your robot. - * The BHI260AP IMU can only be accessed via the new `IMU` interface. The BNO055 IMU can be - programmed using the new `IMU` interface, or you can continue to program it using the old `BNO055IMU` - interface. If you want to be able to quickly switch to a new Control Hub that may contain the - BHI260AP IMU, you should migrate your code to use the new `IMU` interface. - * Unlike the old `BNO055IMU` interface, which only worked correctly when the REV Hub was mounted flat - on your robot, the `IMU` interface allows you to specify the orientation of the REV Hub on your - robot. 
It will account for this, and give you your orientation in a Robot Coordinate System, - instead of a special coordinate system for the REV Hub. As a result, your pitch and yaw will be - 0 when your *robot* is level, instead of when the REV Hub is level, which will result in much - more reliable orientation angle values for most mounting orientations. - * Because of the new robot-centric coordinate system, the pitch and roll angles returned by the - `IMU` interface will be different from the ones returned by the `BNO055IMU` interface. When you are - migrating your code, pay careful attention to the documentation. - * If you have calibrated your BNO055, you can provide that calibration data to the new `IMU` - interface by passing a `BNO055IMUNew.Parameters` instance to `IMU.initialize()`. - * The `IMU` interface is also suitable for implementation by third-party vendors for IMUs that - support providing the orientation in the form of a quaternion. -* Iterative `OpMode`s (as opposed to `LinearOpMode`s) now run on a dedicated thread. - * Cycle times should not be as impacted by everything else going on in the system. - * Slow `OpMode`s can no longer increase the amount of time it takes to process network commands, and vice versa. - * The `init()`, `init_loop()`, `start()` and `loop()` methods no longer need to return within a certain time frame. -* BNO055 IMU legacy driver: restores the ability to initialize in one OpMode, and then have another OpMode re-use that - initialization. This allows you to maintain the 0-yaw position between OpModes, if desired. -* Allows customized versions of device drivers in the FTC SDK to use the same XML tag. - * Before, if you wanted to customize a device driver, you had to copy it to a new class _and_ give - it a new XML tag. Giving it a new XML tag meant that to switch which driver was being used, you - had to modify your configuration file. - * Now, to use your custom driver, all you have to do is specify your custom driver's class when - calling `hardwareMap.get()`. To go back to the original driver, specify the original driver - class. If you specify an interface that is implemented by both the original driver and the - custom driver, there is no guarantee about which implementation will be returned. - -### Bug Fixes -* Fixes accessing the "Manage TensorFlow Lite Models" and "Manage Sounds" links and performing - Blocks and OnBotJava OpMode downloads from the REV Hardware Client. -* Fixes issue where an I2C device driver would be auto-initialized using the parameters assigned in - a previous OpMode run. -* Improves Driver Station popup menu placement in the landscape layout. -* Fixes NullPointerException when attempting to get a non-configured BNO055 IMU in a Blocks OpMode on an RC phone. -* Fixes problem with Blocks if a variable is named `orientation`. - -## Version 8.0 (20220907-131644) - -### Breaking Changes -* Increases the Robocol version. - * This means an 8.0 or later Robot Controller or Driver Station will not be able to communicate with a 7.2 or earlier Driver Station or Robot Controller. - * If you forget to update both apps at the same time, an error message will be shown explaining which app is older and should be updated. -* Initializing I2C devices now happens when you retrieve them from the `HardwareMap` for the first time. - * Previously, all I2C devices would be initialized before the OpMode even began executing, - whether you were actually going to use them or not. This could result in reduced performance and - unnecessary warnings. 
- * With this change, it is very important for Java users to retrieve all needed devices from the - `HardwareMap` **during the Init phase of the OpMode**. Namely, declare a variable for each hardware - device the OpMode will use, and assign a value to each. Do not do this during the Run phase, or your - OpMode may briefly hang while the devices you are retrieving get initialized. - * OpModes that do not use all of the I2C devices specified in the configuration file should take - less time to initialize. OpModes that do use all of the specified I2C devices should take the - same amount of time as previously. -* Fixes [issue #251](https://github.com/FIRST-Tech-Challenge/FtcRobotController/issues/251) by changing the order in which axis rotation rates are read from the angular velocity vector in the BNO055 IMU driver. -* Deprecates `pitchMode` in `BNO055IMU.Parameters`. - * Setting `pitchMode` to `PitchMode.WINDOWS` would break the coordinate conventions used by the driver. -* Moves `OpModeManagerImpl` to the `com.qualcomm.robotcore.eventloop.opmode` package. - * This breaks third party libraries EasyOpenCV (version 1.5.1 and earlier) and FTC Dashboard (version 0.4.4 and earlier). -* Deletes the deprecated `OpMode` method `resetStartTime()` (use `resetRuntime()` instead). -* Deletes the protected `LinearOpMode.LinearOpModeHelper` class (which was not meant for use by OpModes). -* Removes I2C Device (Synchronous) config type (deprecated since 2018) - -### Enhancements -* Uncaught exceptions in OpModes no longer require a Restart Robot - * A blue screen popping up with a stacktrace is not an SDK error; this replaces the red text in the telemetry area. - * Since the very first SDK release, OpMode crashes have put the robot into "EMERGENCY STOP" state, only showing the first line of the exception, and requiring the user to press "Restart Robot" to continue - * Exceptions during an OpMode now open a popup window with the same color scheme as the log viewer, containing 15 lines of the exception stacktrace to allow easily tracing down the offending line without needing to connect to view logs over ADB or scroll through large amounts of logs in the log viewer. - * The exception text in the popup window is both zoomable and scrollable just like a webpage. - * Pressing the "OK" button in the popup window will return to the main screen of the Driver Station and allow an OpMode to be run again immediately, without the need to perform a "Restart Robot" -* Adds new Java sample to demonstrate using a hardware class to abstract robot actuators, and share them across multiple OpModes. - * Sample OpMode is [ConceptExternalHardwareClass.java](FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptExternalHardwareClass.java) - * Abstracted hardware class is [RobotHardware.java](FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/RobotHardware.java) -* Updates RobotAutoDriveByGyro_Linear Java sample to use REV Control/Expansion hub IMU. -* Updates Vuforia samples to reference PowerPlay assets and have correct names and field locations of image targets. -* Updates TensorFlow samples to reference PowerPlay assets. -* Adds opt-in support for Java 8 language features to the OnBotJava editor. - * To opt in, open the OnBotJava Settings, and check `Enable beta Java 8 support`. - * Note that Java 8 code will only compile when the Robot Controller runs Android 7.0 Nougat or later. 
- * Please report issues [here](https://github.com/FIRST-Tech-Challenge/FtcRobotController/issues). -* In OnBotJava, clicking on build errors now correctly jumps to the correct location. -* Improves OnBotJava autocomplete behavior, to provide better completion options in most cases. -* Adds a QR code to the Robot Controller Inspection Report when viewed from the Driver Station for scanning by inspectors at competition. -* Improves I2C performance and reliability in some scenarios. - -## Version 7.2 (20220723-130006) - -### Breaking Changes -* Updates the build tooling. For Android Studio users, this change requires Android Studio Chipmunk 2021.2.1. -* Removes support for devices that are not competition legal, including Modern Robotics Core Control Modules, the Matrix Controller, and HiTechnic/NXT controllers and sensors. Support remains for Modern Robotics I2C sensors. - -### Enhancements -* Increases the height of the 3-dots Landscape menu touch area on the Driver Station, making it much easier to select. -* Adds `terminateOpModeNow()` method to allow OpModes to cleanly self-exit immediately. -* Adds `opModeInInit()` method to `LinearOpMode` to facilitate init-loops. Similar to `opModeIsActive()` but for the init phase. -* Warns user if they have a Logitech F310 gamepad connected that is set to DirectInput mode. -* Allows SPARKmini motor controllers to react more quickly to speed changes. -* Hides the version number of incorrectly installed sister app (i.e. DS installed on RC device or vice-versa) on inspection screen. -* Adds support for allowing the user to edit the comment for the runOpMode block. -* Adds parameterDefaultValues field to @ExportToBlocks. This provides the ability for a java method with an @ExportToBlocks annotation to specify default values for method parameters when it is shown in the block editor. -* Make LinearOpMode blocks more readable. The opmode name is displayed on the runOpMode block, but not on the other LinearOpMode blocks. -* Added support to TensorFlow Object Detection for using a different frame generator, instead of Vuforia. - Using Vuforia to pass the camera frame to TFOD is still supported. -* Removes usage of Renderscript. -* Fixes logspam on app startup of repeated stacktraces relating to `"Failed resolution of: Landroid/net/wifi/p2p/WifiP2pManager$DeviceInfoListener"` -* Allows disabling bluetooth radio from inspection screen -* Improves warning messages when I2C devices are not responding -* Adds support for controlling the RGB LED present on PS4/Etpark gamepads from OpModes -* Removes legacy Pushbot references from OpMode samples. Renames "Pushbot" samples to "Robot". Motor directions reversed to be compatible with "direct Drive" drive train. - - -### Bug fixes -* Fixes [issue #316](https://github.com/FIRST-Tech-Challenge/FtcRobotController/issues/316) (MatrixF.inverted() returned an incorrectly-sized matrix for 1x1 and 2x2 matrixes). -* Self inspect now allows for Driver Station and Robot Controller compatibility between point releases. -* Fixes bug where if the same `RumbleEffect` object instance was queued for multiple gamepads, it - could happen that both rumble commands would be sent to just one gamepad. 
-* Fixes bug in Driver Station where on the Driver Hub, if Advanced Gamepad Features was disabled and - an officially supported gamepad was connected, then opening the Advanced Gamepad Features or - Gamepad Type Overrides screens would cause the gamepad to be rebound by the custom USB driver even - though advanced gamepad features was disabled. -* Protects against (unlikely) null pointer exception in Vuforia Localizer. -* Hardens OnBotJava and Blocks saves to protect against save issues when disconnecting from Program and Manage -* Fixes issue where the RC app would hang if a REV Hub I2C write failed because the previous I2C - operation was still in progress. This hang most commonly occurred during REV 2M Distance Sensor initialization -* Removes ConceptWebcam.java sample program. This sample is not compatible with OnBotJava. -* Fixes bug where using html tags in an @ExportToBlocks comment field prevented the blocks editor from loading. -* Fixes blocks editor so it doesn't ask you to save when you haven't modified anything. -* Fixes uploading a very large blocks project to offline blocks editor. -* Fixes bug that caused blocks for DcMotorEx to be omitted from the blocks editor toolbox. -* Fixes [Blocks Programs Stripped of Blocks (due to using TensorFlow Label block)](https://ftcforum.firstinspires.org/forum/ftc-technology/blocks-programming/87035-blocks-programs-stripped-of-blocks) - -## Version 7.1 (20211223-120805) - -* Fixes crash when calling `isPwmEnabled()` ([issue #223](https://github.com/FIRST-Tech-Challenge/FtcRobotController/issues/233)). -* Fixes lint error ([issue #4](https://github.com/FIRST-Tech-Challenge/FtcRobotController/issues/4)). -* Fixes Driver Station crash when attempting to use DualShock4 v1 gamepad with Advanced Gamepad Features enabled ([issue #173](https://github.com/FIRST-Tech-Challenge/FtcRobotController/issues/173)). -* Fixes possible (but unlikely) Driver Station crash when connecting gamepads of any type. -* Fixes bug where Driver Station would use generic 20% deadzone for Xbox360 and Logitech F310 gamepads when Advanced Gamepad Features was disabled. -* Added SimpleOmniDrive sample OpMode. -* Adds UVC white balance control API. -* Fixes [issue #259](https://github.com/FIRST-Tech-Challenge/FtcRobotController/issues/259) Most blocks samples for TensorFlow can't be used for a different model. - * The blocks previously labeled TensorFlowObjectDetectionFreightFrenzy (from the subcategory named "Optimized for Freight Frenzy") and TensorFlowObjectDetectionCustomModel (from the subcategory named "Custom Model") have been replaced with blocks labeled TensorFlowObjectDetection. Blocks in existing opmodes will be automatically updated to the new blocks when opened in the blocks editor. -* Fixes [issue #260](https://github.com/FIRST-Tech-Challenge/FtcRobotController/issues/260) Blocks can't call a Java method that has a VuforiaLocalizer parameter. - * Blocks now has a block labeled VuforiaFreightFrenzy.getVuforiaLocalizer for this. -* Added a page to manage the TensorFlow Lite models in /sdcard/FIRST/tflitemodels. To get to the TFLite Models page: - * You can click on the link at the bottom of the Manage page. - * You can click on the link at the upper-right of the Blocks project page. -* Fixes logspam when `isBusy()` is called on a motor not in RTP mode. -* Hides the "RC Password" item on the inspection screen for phone-based Robot Controllers. (It is only applicable for Control Hubs). -* Adds channel 165 to Wi-Fi Direct channel selection menu in the settings screen.
(165 was previously available through the web UI, but not locally in the app). - -## Version 7.0 (20210915-141025) - -### Enhancements and New Features -* Adds support for external libraries to OnBotJava and Blocks. - * Upload .jar and .aar files in OnBotJava. - * Known limitation - RobotController device must be running Android 7.0 or greater. - * Known limitation - .aar files with assets are not supported. - * External libraries can provide support for hardware devices by using the annotations in the - com.qualcomm.robotcore.hardware.configuration.annotations package. - * External libraries can include .so files for native code. - * External libraries can be used from OnBotJava OpModes. - * External libraries that use the following annotations can be used from Blocks OpModes. - * org.firstinspires.ftc.robotcore.external.ExportClassToBlocks - * org.firstinspires.ftc.robotcore.external.ExportToBlocks - * External libraries that use the following annotations can add new hardware devices (a usage sketch appears a few bullets below): - * com.qualcomm.robotcore.hardware.configuration.annotations.AnalogSensorType - * com.qualcomm.robotcore.hardware.configuration.annotations.DeviceProperties - * com.qualcomm.robotcore.hardware.configuration.annotations.DigitalIoDeviceType - * com.qualcomm.robotcore.hardware.configuration.annotations.I2cDeviceType - * com.qualcomm.robotcore.hardware.configuration.annotations.MotorType - * com.qualcomm.robotcore.hardware.configuration.annotations.ServoType - * External libraries that use the following annotations can add new functionality to the Robot Controller: - * org.firstinspires.ftc.ftccommon.external.OnCreate - * org.firstinspires.ftc.ftccommon.external.OnCreateEventLoop - * org.firstinspires.ftc.ftccommon.external.OnCreateMenu - * org.firstinspires.ftc.ftccommon.external.OnDestroy - * org.firstinspires.ftc.ftccommon.external.WebHandlerRegistrar -* Adds support for REV Robotics Driver Hub. -* Adds fully custom userspace USB gamepad driver to Driver Station (see "Advanced Gamepad Features" menu in DS settings). - * Allows gamepads to work on devices without native Linux kernel support (e.g. some Romanian Motorola devices). - * Allows the DS to read the unique serial number of each gamepad, enabling auto-recovery of dropped gamepads even if two gamepads of the same model drop. *(NOTE: unfortunately this does not apply to Etpark gamepads, because they do not have a unique serial)*. - * Reading the unique serial number also provides the ability to configure the DS to assign gamepads to a certain position by default (so no need to do start+a/b at all). - * The LED ring on the Xbox360 gamepad and the RGB LED bar on the PS4 gamepad are used to indicate the driver position the gamepad is bound to. - * The rumble motors on the Xbox360, PS4, and Etpark gamepads can be controlled from OpModes. - * The 2-point touchpad on the PS4 gamepad can be read from OpModes. - * The "back" and "guide" buttons on the gamepad can now be safely bound to robot controls (Previously, on many devices, Android would intercept these buttons as home button presses and close the app). - * Advanced Gamepad features are enabled by default, but may be disabled through the settings menu in order to revert to gamepad support provided natively by Android. -* Improves accuracy of ping measurement. - * Fixes issue where the ping time showed as being higher than reality when initially connecting to or restarting the robot. - * To see the full improvement, you must update both the Robot Controller and Driver Station apps.
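To make the hardware-device annotations listed above concrete, here is a minimal, hedged sketch of an I2C driver class that an external library (or TeamCode) could register. The sensor name, `xmlTag`, I2C address, and register below are hypothetical placeholders, and the exact constructor signature the configuration framework expects has varied slightly between SDK versions:

```java
package org.firstinspires.ftc.teamcode;

import com.qualcomm.robotcore.hardware.HardwareDevice;
import com.qualcomm.robotcore.hardware.I2cAddr;
import com.qualcomm.robotcore.hardware.I2cDeviceSynch;
import com.qualcomm.robotcore.hardware.I2cDeviceSynchDevice;
import com.qualcomm.robotcore.hardware.configuration.annotations.DeviceProperties;
import com.qualcomm.robotcore.hardware.configuration.annotations.I2cDeviceType;

// Hypothetical sensor: the name, xmlTag, address, and register are placeholders.
@I2cDeviceType
@DeviceProperties(name = "Example I2C Sensor", xmlTag = "ExampleI2cSensor")
public class ExampleI2cSensor extends I2cDeviceSynchDevice<I2cDeviceSynch> {

    private static final I2cAddr ADDRESS = I2cAddr.create7bit(0x39); // placeholder address
    private static final int REG_VALUE = 0x04;                       // placeholder register

    // Some SDK versions expect a single-argument constructor; adjust to the SDK you build against.
    public ExampleI2cSensor(I2cDeviceSynch deviceClient, boolean deviceClientIsOwned) {
        super(deviceClient, deviceClientIsOwned);
        this.deviceClient.setI2cAddress(ADDRESS);
        super.registerArmingStateCallback(false);
        this.deviceClient.engage();
    }

    // Simple one-register read, returned as an unsigned value.
    public int readValue() {
        return deviceClient.read8(REG_VALUE) & 0xFF;
    }

    @Override
    protected boolean doInitialize() {
        return true;
    }

    @Override
    public HardwareDevice.Manufacturer getManufacturer() {
        return HardwareDevice.Manufacturer.Other;
    }

    @Override
    public String getDeviceName() {
        return "Example I2C Sensor";
    }
}
```

Once such a class is on the robot, the new type appears in the configuration screens under the chosen name, and OpModes can retrieve it with `hardwareMap.get(ExampleI2cSensor.class, "sensor_name")` (configured name hypothetical).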
-* Updates samples located at [/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples](FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples). - * Added ConceptGamepadRumble and ConceptGamepadTouchpad samples to illustrate the use of these new gamepad capabilities. - * Condensed existing Vuforia samples into just 2 samples (ConceptVuforiaFieldNavigation & ConceptVuforiaFieldNavigationWebcam) showing how to determine the robot's location on the field using Vuforia. These both use the current season's Target images. - * Added ConceptVuforiaDriveToTargetWebcam to illustrate an easy way to drive directly to any visible Vuforia target. -* Makes many improvements to the warning system and individual warnings. - * Warnings are now much more spaced out, so that they are easier to read. - * New warnings were added for conditions that should be resolved before competing. - * The mismatched apps warning now uses the major and minor app versions, not the version code. - * The warnings are automatically re-enabled when a Robot Controller app from a new FTC season is installed. -* Adds support for I2C transactions on the Expansion Hub / Control Hub without specifying a register address. - * See section 3 of the [TI I2C spec](https://www.ti.com/lit/an/slva704/slva704.pdf). - * Calling these new methods when using Modern Robotics hardware will result in an UnsupportedOperationException. -* Changes VuforiaLocalizer `close()` method to be public. -* Adds support for TensorFlow v2 object detection models. -* Reduces ambiguity of the Self Inspect language and graphics. -* OnBotJava now warns about potentially unintended file overwrites. -* Improves behavior of the Wi-Fi band and channel selector on the Manage webpage. - -### Bug fixes - * Fixes Robot Controller app crash on Android 9+ when a Driver Station connects. - * Fixes issue where an OpMode was responsible for calling shutdown on the - TensorFlow TFObjectDetector. Now this is done automatically. - * Fixes Vuforia initialization blocks to allow user to choose AxesOrder. Updated - relevant blocks sample opmodes. - * Fixes [FtcRobotController issue #114](https://github.com/FIRST-Tech-Challenge/FtcRobotController/issues/114) - LED blocks and Java class do not work. - * Fixes match logging for OpModes that contain special characters in their names. - * Fixes Driver Station OpMode controls becoming unresponsive if the Driver Station was set to the landscape layout and an OnBotJava build was triggered while an OpMode was running. - * Fixes the Driver Station app closing itself when it is switched away from, or the screen is turned off. - * Fixes "black swirl of doom" (Infinite "configuring Wi-Fi Direct" message) on older devices. - * Updates the wiki comment on the OnBotJava intro page. - -## Version 6.2 (20210218-074821) - -### Enhancements -* Attempts to automatically fix the condition where a Control Hub's internal Expansion Hub is not - working by re-flashing its firmware -* Makes various improvements to the Wi-Fi Direct pairing screen, especially in landscape mode -* Makes the Robot Controller service no longer be categorically restarted when the main activity is brought to foreground - * (e.g.
the service is no longer restarted simply by viewing the Self Inspect screen and pressing the back button) - * It is still restarted if the Settings menu or Configure Robot menu is opened - - -### Bug fixes -* Fixes [FtcRobotController issue #71](https://github.com/FIRST-Tech-Challenge/FtcRobotController/issues/71) - Cannot open OpModes in v6.1 Blocks offline editor -* Fixes [FtcRobotController issue #79](https://github.com/FIRST-Tech-Challenge/FtcRobotController/issues/79) - 6.1 causes a soft reboot on the Motorola E5 Play -* Fixes issue where the Control Hub OS's watchdog would restart the Robot Controller app if - the Control Hub was not able to communicate with its internal Expansion Hub -* Fixes certain I2C devices not showing up in the appropriate `HardwareMap` fields (such as `hardwareMap.colorSensor`) -* Fixes issue where performing a Wi-Fi factory reset on the Control Hub would not set the Wi-Fi band to 2.4 GHz -* Fixes issue where OnBotJava might fail to create a new file if the option to "Setup Code for Configured Hardware" was selected -* Fixes issue where performing certain operations after an OpMode crashes would temporarily break Control/Expansion Hub communication -* Fixes issue where a Control Hub with a configured USB-connected Expansion Hub would not work if the Expansion Hub was missing at startup -* Fixes potential issues caused by having mismatched Control/Expansion Hub firmware versions -* Fixes [ftc_app issue 673](https://github.com/ftctechnh/ftc_app/issues/673) Latest matchlog is being deleted instead of old ones by RobotLog -* Fixes ConceptVuforiaUltimateGoalNavigationWebcam sample opmode by correctly orienting camera on robot. -* Fixes issue where logcat would be spammed with InterruptedExceptions when stop is requested from the Driver Station (this behavior was accidentally introduced in v5.3). This change has no impact on functionality. -* Fixes issue where the blocks editor fails to load if the name of any TeleOp opmode contains an apostrophe. - -## Version 6.1 (20201209-113742) -* Makes the scan button on the configuration screen update the list of Expansion Hubs connected via RS-485 - * Fixes [SkyStone issue #143](https://github.com/FIRST-Tech-Challenge/SkyStone/issues/143) -* Improves web interface compatibility with older browser and Android System WebView versions. -* Fixes issue in UVC driver where some cameras (e.g. certain MS Lifecams) which reported frame intervals as rounded rather than truncated values (e.g. `666667*100ns` instead of `666666*100ns` for 15FPS) would fail to start streaming. -* Adds support in UVC driver for virtual PTZ control -* Adds support in UVC driver for gain (ISO) control -* Adds support in UVC driver for enabling/disabling AE priority. This setting provides a means to tell the camera firmware either - * A) It can undershoot the requested frame rate in order to provide a theoretically better image (i.e.
with a longer exposure than the inter-frame period of the selected frame rate allows) - * B) It *must* meet the inter-frame deadline for the selected frame rate, even if the image may be underexposed as a result -* Adds support for the Control Hub OS 1.1.2 Robot Controller watchdog - * The Robot Controller app will be restarted if it stops responding for more than 10 seconds -* Adds support for using the Driver Station app on Android 10+ -* Introduces an automatic TeleOp preselection feature - * For details and usage guide, please see [this wiki entry](https://github.com/FIRST-Tech-Challenge/FtcRobotController/wiki/Automatically-Loading-a-Driver-Controlled-Op-Mode) -* Shows icon next to OpMode name in the OpMode list dropdown on the Driver Station to indicate the source of the OpMode (i.e. the programming tool used to create it) -* Fixes issue where the Driver Station app would exit after displaying the Configuring Wi-Fi Direct screen -* Fixes Blocks and OnBotJava prompts when accessed via the REV Hardware Client - -## Version 6.0 (20200921-085816) - -### Important Notes -* Version 6.0 is the version for the Ultimate Goal season. -* Requires Android Studio 4.0. -* Android Studio users need to be connected to the Internet the first time they build the app (in order to download needed packages for the build). -* Version 5.5 was a moderately large off-season, August 2020, drop. It's worth reviewing those release notes below also. -* Version 5.5 and greater will not work on older Android 4.x and 5.x phones. Users must upgrade to an approved Android 6.x device or newer. -* The default PIDF values for REV motors have been reverted to the default PID values that were used in the 2018-2019 season - * This change was made because the 2018-2019 values turned out to work better for many mechanisms - * This brings the behavior of the REV motors in line with the behavior of all other motors - * If you prefer the 2019-2020 season's behavior for REV motors, here are the PIDF values that were in place, so that you can manually set them in your OpModes: -
- **HD Hex motors (all gearboxes):** - Velocity PIDF values: `P = 1.17`, `I = 0.117`, `F = 11.7` - Position PIDF values: `P = 5.0` - **Core Hex motor:** - Velocity PIDF values: `P = 4.96`, `I = 0.496`, `F = 49.6` - Position PIDF values: `P = 5.0` - -### New features -* Includes TensorFlow inference model and sample OpModes to detect Ultimate Goal Starter Stacks (four rings vs single ring stack). -* Includes Vuforia Ultimate Goal vision targets and sample OpModes. -* Introduces a digital zoom feature for TensorFlow object detection (to detect objects more accurately at greater distances). -* Adds configuration entry for the REV UltraPlanetary HD Hex motor - -### Enhancements -* Adds setGain() and getGain() methods to the NormalizedColorSensor interface - * By setting the gain of a color sensor, you can adjust for different lighting conditions. - For example, if you detect lower color values than expected, you can increase the gain. - * The gain value is only applied to the argb() and getNormalizedColors() methods, not to the raw color methods. - The getNormalizedColors() method is recommended for ease-of-use and clarity, since argb() has to be converted. - * Updates SensorColor Java sample to demonstrate gain usage -* Merges SensorREVColorDistance Java sample into SensorColor Java sample, which showcases best practices for all color sensors -* Improves retrieving values from the REV Color Sensor V3 - * Updates the normalization calculation of the RGB channels - * Improves the calculation of the alpha channel (can be used as an overall brightness indicator) - * Fixes the default sensor resolution, which caused issues with bright environments - * Adds support for changing the resolution and measuring rate of the Broadcom sensor chip - * Removes IR readings and calculations not meant for the Broadcom sensor chip - -### Bug fixes -* Improves reliability of BNO055IMU IMU initialization to prevent random initialization failures (which manifested as `Problem with 'imu'`). - -## Version 5.5 (20200824-090813) - -Version 5.5 requires Android Studio 4.0 or later. - -### New features -* Adds support for calling custom Java classes from Blocks OpModes (fixes [SkyStone issue #161](https://github.com/FIRST-Tech-Challenge/SkyStone/issues/161)); see the sketch after this list. - * Classes must be in the org.firstinspires.ftc.teamcode package. - * To have easy access to the opMode, hardwareMap, telemetry, gamepad1, and gamepad2, classes can - extend org.firstinspires.ftc.robotcore.external.BlocksOpModeCompanion. - * Methods must be public static and have no more than 21 parameters. - * Methods must be annotated with org.firstinspires.ftc.robotcore.external.ExportToBlocks. - * Parameters declared as OpMode, LinearOpMode, Telemetry, and HardwareMap are supported and the - argument is provided automatically, regardless of the order of the parameters. On the block, - the sockets for those parameters are automatically filled in. - * Parameters declared as char or java.lang.Character will accept any block that returns text - and will only use the first character in the text. - * Parameters declared as boolean or java.lang.Boolean will accept any block that returns boolean. - * Parameters declared as byte, java.lang.Byte, short, java.lang.Short, int, java.lang.Integer, - long, or java.lang.Long, will accept any block that returns a number and will round that - value to the nearest whole number. - * Parameters declared as float, java.lang.Float, double, java.lang.Double will accept any - block that returns a number.
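A minimal sketch of a Blocks-callable helper class following the rules above; the class name, method names, and parameter labels are hypothetical:

```java
package org.firstinspires.ftc.teamcode;

import org.firstinspires.ftc.robotcore.external.BlocksOpModeCompanion;
import org.firstinspires.ftc.robotcore.external.ExportToBlocks;

// Must live in org.firstinspires.ftc.teamcode so the Blocks editor can find it.
public class TeamHelpers extends BlocksOpModeCompanion {

    // Public static and annotated, so the Blocks editor can offer it as a block.
    @ExportToBlocks(
        comment = "Clips a motor power to the legal -1..+1 range.",
        parameterLabels = {"power"}
    )
    public static double clipPower(double power) {
        return Math.max(-1.0, Math.min(1.0, power));
    }

    // BlocksOpModeCompanion exposes telemetry, hardwareMap, gamepad1/gamepad2, and the active opMode.
    @ExportToBlocks(comment = "Shows a status message on the Driver Station.")
    public static void showStatus(String message) {
        telemetry.addData("Status", message);
        telemetry.update();
    }
}
```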
-* Adds telemetry API method for setting display format - * Classic - * Monospace - * HTML (certain tags only) -* Adds blocks support for switching cameras. -* Adds Blocks support for TensorFlow Object Detection with a custom model. -* Adds support for uploading a custom TensorFlow Object Detection model in the Manage page, which - is especially useful for Blocks and OnBotJava users. -* Shows new Control Hub blink codes when the Wi-Fi band is switched using the Control Hub's button (only possible on Control Hub OS 1.1.2) -* Adds new warnings which can be disabled in the Advanced RC Settings - * Mismatched app versions warning - * Unnecessary 2.4 GHz Wi-Fi usage warning - * REV Hub is running outdated firmware (older than version 1.8.2) -* Adds support for Sony PS4 gamepad, and reworks how gamepads work on the Driver Station - * Removes preference which sets gamepad type based on driver position. Replaced with menu which allows specifying type for gamepads with unknown VID and PID - * Attempts to auto-detect gamepad type based on USB VID and PID - * If gamepad VID and PID is not known, use type specified by user for that VID and PID - * If gamepad VID and PID is not known AND the user has not specified a type for that VID and PID, an educated guess is made about how to map the gamepad -* Driver Station will now attempt to automatically recover from a gamepad disconnecting, and re-assign it to the position it was assigned to when it dropped - * If only one gamepad is assigned and it drops: it can be recovered - * If two gamepads are assigned, and have **different** VID/PID signatures, and only one drops: it will be recovered - * If two gamepads are assigned, and have **different** VID/PID signatures, and BOTH drop: both will be recovered - * If two gamepads are assigned, and have **the same** VID/PID signatures, and only one drops: it will be recovered - * If two gamepads are assigned, and have **the same** VID/PID signatures, and BOTH drop: **neither** will be recovered, because of the ambiguity of the gamepads when they re-appear on the USB bus. - * There is currently one known edge case: if there are **two** gamepads with **the same** VID/PID signature plugged in, **but only one is assigned**, and they BOTH drop, it's a 50-50 chance of which one will be chosen for automatic recovery to the assigned position: it is determined by whichever one is re-enumerated first by the USB bus controller. -* Adds landscape user interface to Driver Station - * New feature: practice timer with audio cues - * New feature (Control Hub only): wireless network connection strength indicator (0-5 bars) - * New feature (Control Hub only): tapping on the ping/channel display will switch to an alternate display showing radio RX dBm and link speed (tap again to switch back) - * The layout will NOT autorotate. You can switch the layout from the Driver Station's settings menu. -### Breaking changes -* Removes support for Android versions 4.4 through 5.1 (KitKat and Lollipop). The minSdkVersion is now 23. 
-* Removes the deprecated `LinearOpMode` methods `waitOneFullHardwareCycle()` and `waitForNextHardwareCycle()` -### Enhancements -* Handles RS485 address of Control Hub automatically - * The Control Hub is automatically given a reserved address - * Existing configuration files will continue to work - * All addresses in the range of 1-10 are still available for Expansion Hubs - * The Control Hub light will now normally be solid green, without blinking to indicate the address - * The Control Hub will not be shown on the Expansion Hub Address Change settings page -* Improves REV Hub firmware updater - * The user can now choose between all available firmware update files - * Version 1.8.2 of the REV Hub firmware is bundled into the Robot Controller app. - * Text was added to clarify that Expansion Hubs can only be updated via USB. - * Firmware update speed was reduced to improve reliability - * Allows REV Hub firmware to be updated directly from the Manage webpage -* Improves log viewer on Robot Controller - * Horizontal scrolling support (no longer word wrapped) - * Supports pinch-to-zoom - * Uses a monospaced font - * Error messages are highlighted - * New color scheme -* Attempts to force-stop a runaway/stuck OpMode without restarting the entire app - * Not all types of runaway conditions are stoppable, but if the user code attempts to talk to hardware during the runaway, the system should be able to capture it. -* Makes various tweaks to the Self Inspect screen - * Renames "OS version" entry to "Android version" - * Renames "Wi-Fi Direct Name" to "Wi-Fi Name" - * Adds Control Hub OS version, when viewing the report of a Control Hub - * Hides the airplane mode entry, when viewing the report of a Control Hub - * Removes check for ZTE Speed Channel Changer - * Shows firmware version for **all** Expansion and Control Hubs -* Reworks network settings portion of Manage page - * All network settings are now applied with a single click - * The Wi-Fi Direct channel of phone-based Robot Controllers can now be changed from the Manage page - * Wi-Fi channels are filtered by band (2.4 vs 5 GHz) and whether they overlap with other channels - * The current Wi-Fi channel is pre-selected on phone-based Robot Controllers, and Control Hubs running OS 1.1.2 or later. - * On Control Hubs running OS 1.1.2 or later, you can choose to have the system automatically select a channel on the 5 GHz band -* Improves OnBotJava - * New light and dark themes replace the old themes (chaos, github, chrome,...) - * the new default theme is `light` and will be used when you first update to this version - * OnBotJava now has a tabbed editor - * Read-only offline mode -* Improves function of "exit" menu item on Robot Controller and Driver Station - * Now guaranteed to be fully stopped and unloaded from memory -* Shows a warning message if a LinearOpMode exits prematurely due to failure to monitor for the start condition -* Improves error message shown when the Driver Station and Robot Controller are incompatible with each other -* Driver Station OpMode Control Panel now disabled while a Restart Robot is in progress -* Disables advanced settings related to Wi-Fi Direct when the Robot Controller is a Control Hub. -* Tints phone battery icons on Driver Station when low/critical. -* Uses names "Control Hub Portal" and "Control Hub" (when appropriate) in new configuration files -* Improves I2C read performance - * Very large improvement on Control Hub; up to ~2x faster with small (e.g.
6 byte) reads - * Not as apparent on Expansion Hubs connected to a phone -* Update/refresh build infrastructure - * Update to 'androidx' support library from 'com.android.support:appcompat', which is end-of-life - * Update targetSdkVersion and compileSdkVersion to 28 - * Update Android Studio's Android plugin to latest - * Fix reported build timestamp in 'About' screen -* Add sample illustrating manual webcam use: ConceptWebcam - - -### Bug fixes -* Fixes [SkyStone issue #248](https://github.com/FIRST-Tech-Challenge/SkyStone/issues/248) -* Fixes [SkyStone issue #232](https://github.com/FIRST-Tech-Challenge/SkyStone/issues/232) and - modifies bulk caching semantics to allow for cache-preserving MANUAL/AUTO transitions. -* Improves performance when REV 2M distance sensor is unplugged -* Improves readability of Toast messages on certain devices -* Allows a Driver Station to connect to a Robot Controller after another has disconnected -* Improves generation of fake serial numbers for UVC cameras which do not provide a real serial number - * Previously some devices would assign such cameras a serial of `0:0` and fail to open and start streaming - * Fixes [ftc_app issue #638](https://github.com/ftctechnh/ftc_app/issues/638). -* Fixes a slew of bugs with the Vuforia camera monitor including: - * Fixes bug where preview could be displayed with a wonky aspect ratio - * Fixes bug where preview could be cut off in landscape - * Fixes bug where preview got totally messed up when rotating phone - * Fixes bug where crosshair could drift off target when using webcams -* Fixes issue in UVC driver on some devices ([ftc_app 681](https://github.com/ftctechnh/ftc_app/issues/681)) if streaming was started/stopped multiple times in a row - * Issue manifested as kernel panic on devices which do not have [this kernel patch](https://lore.kernel.org/patchwork/patch/352400/). - * On affected devices which **do** have the patch, the issue was manifest as simply a failure to start streaming. - * The Tech Team believes that the root cause of the issue is a bug in the Linux kernel XHCI driver. A workaround was implemented in the SDK UVC driver. -* Fixes bug in UVC driver where often half the frames from the camera would be dropped (e.g. only 15FPS delivered during a streaming session configured for 30FPS). -* Fixes issue where TensorFlow Object Detection would show results whose confidence was lower than - the minimum confidence parameter. -* Fixes a potential exploitation issue of [CVE-2019-11358](https://www.cvedetails.com/cve/CVE-2019-11358/) in OnBotJava -* Fixes changing the address of an Expansion Hub with additional Expansion Hubs connected to it -* Preserves the Control Hub's network connection when "Restart Robot" is selected -* Fixes issue where device scans would fail while the Robot was restarting -* Fix RenderScript usage - * Use androidx.renderscript variant: increased compatibility - * Use RenderScript in Java mode, not native: simplifies build -* Fixes webcam-frame-to-bitmap conversion problem: alpha channel wasn't being initialized, only R, G, & B -* Fixes possible arithmetic overflow in Deadline -* Fixes deadlock in Vuforia webcam support which could cause 5-second delays when stopping OpMode - -## Version 5.4 (20200108-101156) -* Fixes [SkyStone issue #88](https://github.com/FIRST-Tech-Challenge/SkyStone/issues/88) -* Adds an inspection item that notes when a robot controller (Control Hub) is using the factory default password. 
-* Fixes [SkyStone issue #61](https://github.com/FIRST-Tech-Challenge/SkyStone/issues/61) -* Fixes [SkyStone issue #142](https://github.com/FIRST-Tech-Challenge/SkyStone/issues/142) -* Fixes [ftc_app issue #417](https://github.com/ftctechnh/ftc_app/issues/417) by adding more current and voltage monitoring capabilities for REV Hubs. -* Fixes [a crash sometimes caused by OnBotJava activity](https://ftcforum.firstinspires.org/forum/ftc-technology/76217-onbotjava-crashes-robot-controller) -* Improves OnBotJava autosave functionality [ftc_app #738](https://github.com/ftctechnh/ftc_app/issues/738) -* Fixes system responsiveness issue when an Expansion Hub is disconnected -* Fixes issue where IMU initialization could prevent OpModes from stopping -* Fixes issue where AndroidTextToSpeech.speak() would fail if it was called too early -* Adds telemetry.speak() methods and blocks, which cause the Driver Station (if also updated) to speak text -* Adds and improves Expansion Hub-related warnings - * Improves Expansion Hub low battery warning - * Displays the warning immediately after the hub reports it - * Specifies whether the condition is current or occurred temporarily during an OpMode run - * Displays which hubs reported low battery - * Displays warning when hub loses and regains power during an OpMode run - * Fixes the hub's LED pattern after this condition - * Displays warning when Expansion Hub is not responding to commands - * Specifies whether the condition is current or occurred temporarily during an OpMode run - * Clarifies warning when Expansion Hub is not present at startup - * Specifies that this condition requires a Robot Restart before the hub can be used. - * The hub light will now accurately reflect this state - * Improves logging and reduces log spam during these conditions -* Syncs the Control Hub time and timezone to a connected web browser programming the robot, if a Driver Station is not available. -* Adds bulk read functionality for REV Hubs (see the sketch at the end of this version's notes) - * A bulk caching mode must be set at the Hub level with `LynxModule#setBulkCachingMode()`. This applies to all relevant SDK hardware classes that reference that Hub. - * The following Hub bulk caching modes are available: - * `BulkCachingMode.OFF` (default): All hardware calls operate as usual. Bulk data can be read through `LynxModule#getBulkData()` and processed manually. - * `BulkCachingMode.AUTO`: Applicable hardware calls are served from a bulk read cache that is cleared/refreshed automatically to ensure identical commands don't hit the same cache. The cache can also be cleared manually with `LynxModule#clearBulkCache()`, although this is not recommended. - * (advanced users) `BulkCachingMode.MANUAL`: Same as `BulkCachingMode.AUTO` except the cache is never cleared automatically. To avoid getting stale data, the cache must be manually cleared at the beginning of each loop body or as the user deems appropriate. -* Removes PIDF Annotation values added in Rev 5.3 (to AndyMark, goBILDA and TETRIX motor configurations). - * The new motor types will still be available but their Default control behavior will revert back to Rev 5.2 -* Adds new `ConceptMotorBulkRead` sample OpMode to demonstrate and compare Motor Bulk-Read modes for reducing I/O latencies.
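A minimal sketch of the `MANUAL` bulk caching mode described above; the OpMode name and the motor configuration name are hypothetical:

```java
package org.firstinspires.ftc.teamcode;

import com.qualcomm.hardware.lynx.LynxModule;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.DcMotorEx;

import java.util.List;

@TeleOp(name = "Sketch: Bulk Caching", group = "Concept")
@Disabled
public class BulkCachingSketch extends LinearOpMode {
    @Override
    public void runOpMode() {
        DcMotorEx motor = hardwareMap.get(DcMotorEx.class, "left_drive"); // placeholder name

        // Enable MANUAL bulk caching on every Hub (Control Hub and any Expansion Hubs).
        List<LynxModule> hubs = hardwareMap.getAll(LynxModule.class);
        for (LynxModule hub : hubs) {
            hub.setBulkCachingMode(LynxModule.BulkCachingMode.MANUAL);
        }

        waitForStart();
        while (opModeIsActive()) {
            // In MANUAL mode the cache must be cleared once per loop,
            // otherwise repeated reads return stale data.
            for (LynxModule hub : hubs) {
                hub.clearBulkCache();
            }
            // Encoder and velocity reads are now served from one bulk transfer per Hub.
            telemetry.addData("position", motor.getCurrentPosition());
            telemetry.addData("velocity", motor.getVelocity());
            telemetry.update();
        }
    }
}
```

With `AUTO` mode the explicit clear step is unnecessary; `MANUAL` simply makes the cache lifetime explicit, as the notes above describe.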
- -## Version 5.3 (20191004-112306) -* Fixes external USB/UVC webcam support -* Makes various bugfixes and improvements to Blocks page, including but not limited to: - * Many visual tweaks - * Browser zoom and window resize behave better - * Resizing the Java preview pane works better and more consistently across browsers - * The Java preview pane consistently gets scrollbars when needed - * The Java preview pane is hidden by default on phones - * Internet Explorer 11 should work - * Large dropdown lists display properly on lower res screens - * Disabled buttons are now visually identifiable as disabled - * A warning is shown if a user selects a TFOD sample, but their device is not compatible - * Warning messages in a Blocks OpMode are now visible by default. -* Adds goBILDA 5201 and 5202 motors to Robot Configurator -* Adds PIDF Annotation values to AndyMark, goBILDA and TETRIX motor configurations. - This has the effect of causing the RUN_USING_ENCODER and RUN_TO_POSITION modes to use - PIDF vs PID closed loop control on these motors. This should provide more responsive, yet stable, speed control. - PIDF adds Feedforward control to the basic PID control loop. - Feedforward is useful when controlling a motor's speed because it "anticipates" how much the control voltage - must change to achieve a new speed set-point, rather than requiring the integrated error to change sufficiently. - The PIDF values were chosen to provide responsive, yet stable, speed control on a lightly loaded motor. - The more heavily a motor is loaded (drag or friction), the more noticeable the PIDF improvement will be. -* Fixes startup crash on Android 10 -* Fixes [ftc_app issue #712](https://github.com/ftctechnh/ftc_app/issues/712) (thanks to FROGbots-4634) -* Fixes [ftc_app issue #542](https://github.com/ftctechnh/ftc_app/issues/542) -* Allows "A" and lowercase letters when naming a device through RC and DS apps. - -## Version 5.2 (20190905-083277) -* Fixes extra-wide margins on settings activities, and placement of the new configuration button -* Adds Skystone Vuforia image target data. - * Includes sample Skystone Vuforia Navigation OpModes (Java). - * Includes sample Skystone Vuforia Navigation OpModes (Blocks). -* Adds TensorFlow inference model (.tflite) for Skystone game elements. - * Includes sample Skystone TensorFlow OpModes (Java). - * Includes sample Skystone TensorFlow OpModes (Blocks). -* Removes older (season-specific) sample OpModes. -* Includes 64-bit support (to comply with [Google Play requirements](https://android-developers.googleblog.com/2019/01/get-your-apps-ready-for-64-bit.html)). -* Protects against Stuck OpModes when a Restart Robot is requested. (Thanks to FROGbots-4634) ([ftc_app issue #709](https://github.com/ftctechnh/ftc_app/issues/709)) -* Blocks related changes: - * Fixes bug with blocks generated code when hardware device name is a Java or JavaScript reserved word. - * Shows generated Java code for blocks, even when hardware items are missing from the active configuration.
- * Displays warning icon when outdated Vuforia and TensorFlow blocks are used ([SkyStone issue #27](https://github.com/FIRST-Tech-Challenge/SkyStone/issues/27)) - -## Version 5.1 (20190820-222104) -* Defines default PIDF parameters for the following motors: - * REV Core Hex Motor - * REV 20:1 HD Hex Motor - * REV 40:1 HD Hex Motor -* Adds back button when running on a device without a system back button (such as a Control Hub) -* Allows a REV Control Hub to update the firmware on a REV Expansion Hub via USB -* Fixes [SkyStone issue #9](https://github.com/FIRST-Tech-Challenge/SkyStone/issues/9) -* Fixes [ftc_app issue #715](https://github.com/ftctechnh/ftc_app/issues/715) -* Prevents extra DS User clicks by filtering based on current state. -* Prevents incorrect DS UI state changes when receiving new OpMode list from RC -* Adds support for REV Color Sensor V3 -* Adds a manual-refresh DS Camera Stream for remotely viewing RC camera frames. - * To show the stream on the DS, initialize **but do not run** a stream-enabled opmode, select the Camera Stream option in the DS menu, and tap the image to refresh. This feature is automatically enabled when using Vuforia or TFOD—no additional RC configuration is required for typical use cases. To hide the stream, select the same menu item again. - * Note that gamepads are disabled and the selected opmode cannot be started while the stream is open as a safety precaution. - * To use custom streams, consult the API docs for `CameraStreamServer#setSource` and `CameraStreamSource`. -* Adds many Star Wars sounds to RobotController resources. -* Added Skystone Sounds Chooser Sample Program. -* Switches out startup, connect chimes, and error/warning sounds for Star Wars sounds -* Updates OnBot Java to use a WebSocket for communication with the robot - * The OnBot Java page no longer has to do a full refresh when a user switches from editing one file to another - -Known issues: -* Camera Stream - * The Vuforia camera stream inherits the issues present in the phone preview (namely [ftc_app issue #574](https://github.com/ftctechnh/ftc_app/issues/574)). This problem does not affect the TFOD camera stream even though it receives frames from Vuforia. - * The orientation of the stream frames may not always match the phone preview. For now, these frames may be rotated manually via a custom `CameraStreamSource` if desired. -* OnBotJava - * Browser back button may not always work correctly - * It's possible for a build to be queued, but not started. The OnBot Java build console will display a warning if this occurs. - * A user might not realize they are editing a different file if the user inadvertently switches from one file to another since this switch is now seamless. The name of the currently open file is displayed in the browser tab. - -## Version 5.0 (built on 19.06.14) - * Support for the REV Robotics Control Hub. - * Adds a Java preview pane to the Blocks editor. - * Adds a new offline export feature to the Blocks editor. - * Display Wi-Fi channel in Network circle on Driver Station. - * Adds calibration for Logitech C270 - * Updates build tooling and target SDK. - * Compliance with Google's permissions infrastructure (Required after build tooling update). - * Keep Alives to mitigate the Motorola Wi-Fi scanning problem. Telemetry substitute no longer necessary. - * Improves Vuforia error reporting. - * Fixes ftctechnh/ftc_app issues 621, 713. - * Miscellaneous bug fixes and improvements. 
- -## Version 4.3 (built on 18.10.31) - * Includes missing TensorFlow-related libraries and files. - -## Version 4.2 (built on 18.10.30) - * Includes fix to avoid deadlock situation with WatchdogMonitor which could result in USB communication errors. - - Comm error appeared to require that user disconnect USB cable and restart the Robot Controller app to recover. - - robotControllerLog.txt would have error messages that included the words "E RobotCore: lynx xmit lock: #### abandoning lock:" - * Includes fix to correctly list the parent module address for a REV Robotics Expansion Hub in a configuration (.xml) file. - - Bug in versions 4.0 and 4.1 would incorrectly list the module address for a parent REV Robotics device as "1". - - If the parent module had a higher address value than the daisy-chained module, then this bug would prevent the Robot Controller from communicating with the downstream Expansion Hub. - * Added requirement for ACCESS_COARSE_LOCATION to allow a Driver Station running Android Oreo to scan for Wi-Fi Direct devices. - * Added google() repo to build.gradle because aapt2 must be downloaded from the google() repository beginning with version 3.2 of the Android Gradle Plugin. - - Important Note: Android Studio users will need to be connected to the Internet the first time they build the ftc_app project. - - Internet connectivity is required for the first build so the appropriate files can be downloaded from the Google repository. - - Users should not need to be connected to the Internet for subsequent builds. - - This should also fix the build issue where Android Studio would complain that it "Could not find com.android.tools.lint:lint-gradle:26.1.4" (or similar). - * Added support for REV Spark Mini motor controller as part of the configuration menu for a servo/PWM port on the REV Expansion Hub. - * Provide examples for playing audio files in an OpMode. - * Block Development Tool Changes - - Includes a fix for a problem with the Velocity blocks that was reported in the FTC Technology forum (Blocks Programming subforum). - - Change the "Save completed successfully." message to a white color so it will contrast with a green background. - - Fixed the "Download image" feature so it will work if there are text blocks in the OpMode. - * Introduce support for Google's TensorFlow Lite technology for object detection for the 2018-2019 game. - - TensorFlow Lite can recognize the Gold Mineral and Silver Mineral from the 2018-2019 game. - - Example Java and Block OpModes are included to show how to determine the relative position of the gold block (left, center, right). -
- - User can upload calibration files from Program and Manage web interface. - - UVC cameras seem to draw a fair amount of electrical current from the USB bus. - + This does not appear to present any problems for the REV Robotics Control Hub. - + This does seem to create stability problems when using some cameras with an Android phone-based Robot Controller. - + FTC Tech Team is investigating options to mitigate this issue with the phone-based Robot Controllers. - - Updated sample Vuforia Navigation and VuMark OpModes to demonstrate how to use an internal phone-based camera and an external UVC webcam. - - * Support for improved motor control. - - REV Robotics Expansion Hub firmware 1.8 and greater will support a feed forward mechanism for closed loop motor control. - - FTC SDK has been modified to support PIDF coefficients (proportional, integral, derivative, and feed forward). - - FTC Blocks development tool modified to include PIDF programming blocks. - - Deprecated older PID-related methods and variables. - - REV's 1.8.x PIDF-related changes provide a more linear and accurate way to control a motor. - - * Wireless - - Added 5GHz support for wireless channel changing for those devices that support it. - + Tested with Moto G5 and E4 phones. - + Also tested with other (currently non-approved) phones such as Samsung Galaxy S8. - -* Improved Expansion Hub firmware update support in Robot Controller app - - Changes to make the system more robust during the firmware update process (when performed through Robot Controller app). - - User no longer has to disconnect a downstream daisy-chained Expansion Hub when updating an Expansion Hub's firmware. - + If user is updating an Expansion Hub's firmware through a USB connection, he/she does not have to disconnect RS485 connection to other Expansion Hubs. - + The user still must use a USB connection to update an Expansion Hub's firmware. - + The user cannot update the Expansion Hub firmware for a downstream device that is daisy chained through an RS485 connection. - - If an Expansion Hub accidentally gets "bricked", the Robot Controller app is now more likely to recognize the Hub when it scans the USB bus. - + Robot Controller app should be able to detect an Expansion Hub, even if it accidentally was bricked in a previous update attempt. - + Robot Controller app should be able to install the firmware onto the Hub, even if it accidentally was bricked in a previous update attempt. - - * Resiliency - - FTC software can detect and enable an FTDI reset feature that is available with REV Robotics v1.8 Expansion Hub firmware and greater. - + When enabled, the Expansion Hub can detect if it hasn't communicated with the Robot Controller over the FTDI (USB) connection. - + If the Hub hasn't heard from the Robot Controller in a while, it will reset the FTDI connection. - + This action helps the system recover from some ESD-induced disruptions. - - Various fixes to improve reliability of FTC software. - - * Blocks - - Fixed errors with string and list indices in blocks export to Java. - - Support for USB connected UVC webcams. - - Refactored optimized Blocks Vuforia code to support Rover Ruckus image targets. - - Added programming blocks to support PIDF (proportional, integral, derivative and feed forward) motor control. - - Added formatting options (under Telemetry and Miscellaneous categories) so user can set how many decimal places to display a numerical value.
- - Support to play audio files (which are uploaded through Blocks web interface) on Driver Station in addition to the Robot Controller. - - Fixed bug with Download Image of Blocks feature. - - Support for REV Robotics Blinkin LED Controller. - - Support for REV Robotics 2m Distance Sensor. - - Added support for a REV Touch Sensor (no longer have to configure as a generic digital device). - - Added blocks for DcMotorEx methods. - + These are enhanced methods that you can use when supported by the motor controller hardware. - + The REV Robotics Expansion Hub supports these enhanced methods. - + Enhanced methods include methods to get/set motor velocity (in encoder pulses per second), get/set PIDF coefficients, etc.. - - * Modest Improvements in Logging - - Decrease frequency of battery checker voltage statements. - - Removed non-FTC related log statements (wherever possible). - - Introduced a "Match Logging" feature. - + Under "Settings" a user can enable/disable this feature (it's disabled by default). - + If enabled, user provides a "Match Number" through the Driver Station user interface (top of the screen). - * The Match Number is used to create a log file specifically with log statements from that particular OpMode run. - * Match log files are stored in /sdcard/FIRST/matlogs on the Robot Controller. - * Once an OpMode run is complete, the Match Number is cleared. - * This is a convenient way to create a separate match log with statements only related to a specific OpMode run. - - * New Devices - - Support for REV Robotics Blinkin LED Controller. - - Support for REV Robotics 2m Distance Sensor. - - Added configuration option for REV 20:1 HD Hex Motor. - - Added support for a REV Touch Sensor (no longer have to configure as a generic digital device). - - * Miscellaneous - - Fixed some errors in the definitions for acceleration and velocity in our javadoc documentation. - - Added ability to play audio files on Driver Station - - When user is configuring an Expansion Hub, the LED on the Expansion Hub will change blink pattern (purple-cyan) to indicate which Hub is currently being configured. - - Renamed I2cSensorType to I2cDeviceType. - - Added an external sample OpMode that demonstrates localization using 2018-2019 (Rover Ruckus presented by QualComm) Vuforia targets. - - Added an external sample OpMode that demonstrates how to use the REV Robotics 2m Laser Distance Sensor. - - Added an external sample OpMode that demonstrates how to use the REV Robotics Blinkin LED Controller. - - Re-categorized external Java sample OpModes to "TeleOp" instead of "Autonomous". - -Known issues: - * Initial support for UVC compatible cameras - - UVC cameras seem to draw significant amount of current from the USB bus. - + This does not appear to present any problems for the REV Robotics Control Hub. - + This does seem to create stability problems when using some cameras with an Android phone-based Robot Controller. - + FTC Tech Team is investigating options to mitigate this issue with the phone-based Robot Controllers. - - There might be a possible deadlock which causes the RC to become unresponsive when using a UVC webcam with a Nougat Android Robot Controller. - - * Wireless - - When user selects a wireless channel, this channel does not necessarily persist if the phone is power cycled. - + Tech Team is hoping to eventually address this issue in a future release. - + Issue has been present since apps were introduced (i.e., it is not new with the v4.0 release). 
- - Wireless channel is not currently displayed for Wi-Fi Direct connections. - - * Miscellaneous - - The blink indication feature that shows which Expansion Hub is currently being configured does not work for a newly created configuration file. - + User has to first save a newly created configuration file and then close and re-edit the file in order for the blink indicator to work. - -## Version 3.6 (built on 17.12.18) - -Changes include: - * Blocks Changes - - Uses updated Google Blockly software to allow users to edit their OpModes on Apple iOS devices (including iPad and iPhone). - - Improvement in Blocks tool to handle corrupt OpMode files. - - Autonomous OpModes should no longer get switched back to tele-op after re-opening them to be edited. - - The system can now detect type mismatches during runtime and alert the user with a message on the Driver Station. - * Updated javadoc documentation for setPower() method to reflect correct range of values (-1 to +1). - * Modified VuforiaLocalizerImpl to allow for user rendering of frames - - Added a user-overrideable onRenderFrame() method which gets called by the class's renderFrame() method. - -## Version 3.5 (built on 17.10.30) - -Changes with version 3.5 include: - * Introduced a fix to prevent random OpMode stops, which can occur after the Robot Controller app has been paused and then resumed (for example, when a user temporarily turns off the display of the Robot Controller phone, and then turns the screen back on). - * Introduced a fix to prevent random OpMode stops, which were previously caused by random peer disconnect events on the Driver Station. - * Fixes issue where log files would be closed on pause of the RC or DS, but not re-opened upon resume. - * Fixes issue with battery handler (voltage) start/stop race. - * Fixes issue where Android Studio generated OpModes would disappear from available list in certain situations. - * Fixes problem where OnBot Java would not build on REV Robotics Control Hub. - * Fixes problem where OnBot Java would not build if the date and time on the Robot Controller device was "rewound" (set to an earlier date/time). - * Improved error message on OnBot Java that occurs when renaming a file fails. - * Removed unneeded resources from android.jar binaries used by OnBot Java to reduce final size of Robot Controller app. - * Added MR_ANALOG_TOUCH_SENSOR block to Blocks Programming Tool. - -## Version 3.4 (built on 17.09.06) - -Changes with version 3.4 include: - * Added telemetry.update() statement for BlankLinearOpMode template. - * Renamed sample Block OpModes to be more consistent with Java samples. - * Added some additional sample Block OpModes. - * Reworded OnBot Java readme slightly. - -## Version 3.3 (built on 17.09.04) - -This version of the software includes improvements for the FTC Blocks Programming Tool and the OnBot Java Programming Tool. - -Changes with version 3.3 include: - * Android Studio ftc_app project has been updated to use Gradle Plugin 2.3.3. - * Android Studio ftc_app project is already using the Gradle 3.5 distribution. - * Robot Controller log has been renamed to /sdcard/RobotControllerLog.txt (note that this change was actually introduced w/ v3.2). - * Improvements in I2C reliability. - * Optimized I2C read for REV Expansion Hub, with v1.7 firmware or greater. - * Updated all external/samples (available through OnBot and in Android project folder). - * Vuforia - - Added support for VuMarks that will be used for the 2017-2018 season game.
- * Blocks - - Update to latest Google Blockly release. - - Sample OpModes can be selected as a template when creating new OpMode. - - Fixed bug where the blocks would disappear temporarily when mouse button is held down. - - Added blocks for Range.clip and Range.scale. - - User can now disable/enable Block OpModes. - - Fix to prevent occasional Blocks deadlock. - * OnBot Java - - Significant improvements with autocomplete function for OnBot Java editor. - - Sample OpModes can be selected as a template when creating new OpMode. - - Fixes and changes to complete hardware setup feature. - - Updated (and more useful) OnBot welcome message. - -Known issues: - * Android Studio - - After updating to the new v3.3 Android Studio project folder, if you get error messages indicating "InvalidVirtualFileAccessException" then you might need to do a File->Invalidate Caches / Restart to clear the error. - * OnBot Java - - Sometimes when you push the build button to build all OpModes, the RC returns an error message that the build failed. If you press the build button a second time, the build typically succeeds. - -## Version 3.2 (built on 17.08.02) - -This version of the software introduces the "OnBot Java" Development Tool. Similar to the FTC Blocks Development Tool, the FTC OnBot Java Development Tool allows a user to create, edit and build OpModes dynamically using only a JavaScript-enabled web browser. - -The OnBot Java Development Tool is an integrated development environment (IDE) that is served up by the Robot Controller. OpModes are created and edited using a JavaScript-enabled browser (Google Chrome is recommended). OpModes are saved on the Robot Controller Android device directly. - -The OnBot Java Development Tool provides a Java programming environment that does NOT need Android Studio. - - - -Changes with version 3.2 include: - * Enhanced web-based development tools - - Introduction of OnBot Java Development Tool. - - Web-based programming and management features are "always on" (user no longer needs to put Robot Controller into programming mode). - - Web-based management interface (where user can change Robot Controller name and also easily download Robot Controller log file). - - OnBot Java, Blocks and Management features available from web based interface. - -* Blocks Programming Development Tool: - - Changed "LynxI2cColorRangeSensor" block to "REV Color/range sensor" block. - - Fixed tooltip for ColorSensor.isLightOn block. - Added blocks for ColorSensor.getNormalizedColors and LynxI2cColorRangeSensor.getNormalizedColors. - -* Added example OpModes for digital touch sensor and REV Robotics Color Distance sensor. -* User selectable color themes. -* Includes many minor enhancements and fixes (too numerous to list). - -Known issues: -* Auto complete function is incomplete and does not support the following (for now): - - Access via *this* keyword - - Access via *super* keyword - - Members of the superclass, not overridden by the class - - Any methods provided in the current class - - Inner classes - - Can't handle casted objects - - Any objects coming from a parenthetically enclosed expression -
If you were using a 2.x version of the software previously, updating to version 3.1 requires that you also update your Driver Station software in addition to updating the Robot Controller software. - -Also note that in version 3.10 software, the setMaxSpeed and getMaxSpeed methods are no longer available (not deprecated, they have been removed from the SDK). Also note that the new 3.x software incorporates motor profiles that a user can select as he/she configures the robot. - -Changes include: - * Blocks changes - - Added VuforiaTrackableDefaultListener.getPose and Vuforia.trackPose blocks. - - Added optimized blocks support for Vuforia extended tracking. - - Added atan2 block to the math category. - - Added useCompetitionFieldTargetLocations parameter to Vuforia.initialize block. If set to false, the target locations are placed at (0,0,0) with target orientation as specified in https://github.com/gearsincorg/FTCVuforiaDemo/blob/master/Robot_Navigation.java tutorial OpMode. - * Incorporates additional improvements to USB comm layer to improve system resiliency (to recover from a greater number of communication disruptions). - -************************************************************************************** - -Additional Notes Regarding Version 3.00 (built on 17.04.13) - -In addition to the release changes listed below (see section labeled "Version 3.00 (built on 17.04.13)"), version 3.00 has the following important changes: - -1. Version 3.00 software uses a new version of the FTC Robocol (robot protocol). If you upgrade to v3.0 on the Robot Controller and/or Android Studio side, you must also upgrade the Driver Station software to match the new Robocol. -2. Version 3.00 software removes the setMaxSpeed and getMaxSpeed methods from the DcMotor class. If you have an OpMode that formerly used these methods, you will need to remove the references/calls to these methods. Instead, v3.0 provides the max speed information through the use of motor profiles that are selected by the user during robot configuration. -3. Version 3.00 software currently does not have a mechanism to disable extra I2C sensors. We hope to re-introduce this function with a release in the near future. -
- * Implementation of specific motor profiles for approved 12V motors (includes Tetrix, AndyMark, Matrix and REV models). - * Modest improvements to enhance Wi-Fi P2P pairing. - * Fixes telemetry log addition race. - * Publishes all the sources (not just a select few). - * Includes Block programming improvements - - Addition of optimized Vuforia blocks. - - Auto scrollbar to projects and sounds pages. - - Fixed blocks paste bug. - - Blocks execute after while-opModeIsActive loop (to allow for cleanup before exiting OpMode). - - Added gyro integratedZValue block. - - Fixes bug with projects page for Firefox browser. - - Added IsSpeaking block to AndroidTextToSpeech. - * Implements support for the REV Robotics Expansion Hub - - Implements support for integral REV IMU (physically installed on I2C bus 0, uses same Bosch BNO055 9 axis absolute orientation sensor as Adafruit 9DOF abs orientation sensor). - Implements support for REV color/range/light sensor. - - Provides support to update Expansion Hub firmware through FTC SDK. - - Detects REV firmware version and records in log file. - - Includes support for REV Control Hub (note that the REV Control Hub is not yet approved for FTC use). - - Implements FTC Blocks programming support for REV Expansion Hub and sensor hardware. - - Detects and alerts when an I2C device disconnects. - -## Version 2.62 (built on 17.01.07) - * Added null pointer check before calling modeToByte() in finishModeSwitchIfNecessary method for ModernRoboticsUsbDcMotorController class. - * Changes to enhance Modern Robotics USB protocol robustness. - -## Version 2.61 (released on 16.12.19) - * Blocks Programming mode changes: - - Fix to correct issue when an exception was thrown because an OpticalDistanceSensor object appears twice in the hardware map (the second time as a LightSensor). - -## Version 2.6 (released on 16.12.16) - * Fixes for Gyro class: - - Improve (decrease) sensor refresh latency. - - Fix isCalibrating issues. - * Blocks Programming mode changes: - - Blocks now ignores a device in the configuration xml if the name is empty. Other devices in the configuration work fine. - -## Version 2.5 (internal release on 16.12.13) - * Blocks Programming mode changes: - - Added blocks support for AdafruitBNO055IMU. - - Added Download OpMode button to FtcBlocks.html. - - Added support for copying blocks in one OpMode and pasting them in another OpMode. The clipboard content is stored on the phone, so the programming mode server must be running. - - Modified Utilities section of the toolbox. - - In Programming Mode, display information about the active connections. - - Fixed paste location when workspace has been scrolled. - - Added blocks support for the android Accelerometer. - - Fixed issue where Blocks Upload OpMode truncated name at first dot. - - Added blocks support for Android SoundPool. - - Added type safety to blocks for Acceleration. - - Added type safety to blocks for AdafruitBNO055IMU.Parameters. - - Added type safety to blocks for AnalogInput. - - Added type safety to blocks for AngularVelocity. - - Added type safety to blocks for Color. - - Added type safety to blocks for ColorSensor. - - Added type safety to blocks for CompassSensor. - - Added type safety to blocks for CRServo. - - Added type safety to blocks for DigitalChannel. - - Added type safety to blocks for ElapsedTime. - - Added type safety to blocks for Gamepad. - - Added type safety to blocks for GyroSensor. - - Added type safety to blocks for IrSeekerSensor.
- - Added type safety to blocks for LED. - - Added type safety to blocks for LightSensor. - - Added type safety to blocks for LinearOpMode. - - Added type safety to blocks for MagneticFlux. - - Added type safety to blocks for MatrixF. - - Added type safety to blocks for MrI2cCompassSensor. - - Added type safety to blocks for MrI2cRangeSensor. - - Added type safety to blocks for OpticalDistanceSensor. - - Added type safety to blocks for Orientation. - - Added type safety to blocks for Position. - - Added type safety to blocks for Quaternion. - - Added type safety to blocks for Servo. - - Added type safety to blocks for ServoController. - - Added type safety to blocks for Telemetry. - - Added type safety to blocks for Temperature. - - Added type safety to blocks for TouchSensor. - - Added type safety to blocks for UltrasonicSensor. - - Added type safety to blocks for VectorF. - - Added type safety to blocks for Velocity. - - Added type safety to blocks for VoltageSensor. - - Added type safety to blocks for VuforiaLocalizer.Parameters. - - Added type safety to blocks for VuforiaTrackable. - - Added type safety to blocks for VuforiaTrackables. - - Added type safety to blocks for enums in AdafruitBNO055IMU.Parameters. - - Added type safety to blocks for AndroidAccelerometer, AndroidGyroscope, AndroidOrientation, and AndroidTextToSpeech. - -## Version 2.4 (released on 16.11.13) - * Fix to avoid crashing for nonexistent resources. - * Blocks Programming mode changes: - - Added blocks to support OpenGLMatrix, MatrixF, and VectorF. - - Added blocks to support AngleUnit, AxesOrder, AxesReference, CameraDirection, CameraMonitorFeedback, DistanceUnit, and TempUnit. - - Added blocks to support Acceleration. - - Added blocks to support LinearOpMode.getRuntime. - - Added blocks to support MagneticFlux and Position. - - Fixed typos. - - Made blocks for ElapsedTime more consistent with other objects. - - Added blocks to support Quaternion, Velocity, Orientation, AngularVelocity. - - Added blocks to support VuforiaTrackables, VuforiaTrackable, VuforiaLocalizer, VuforiaTrackableDefaultListener. - - Fixed a few blocks. - - Added type checking to new blocks. - - Updated to latest blockly. - - Added default variable blocks to navigation and matrix blocks. - - Fixed toolbox entry for openGLMatrix_rotation_withAxesArgs. - - When user downloads Blocks-generated OpMode, only the .blk file is downloaded. - - When user uploads Blocks-generated OpMode (.blk file), Javascript code is auto generated. - - Added DbgLog support. - - Added logging when a blocks file is read/written. - - Fixed bug to properly render blocks even if missing devices from configuration file. - - Added support for additional characters (not just alphanumeric) for the block file names (for download and upload). - - Added support for OpMode flavor (“Autonomous” or “TeleOp”) and group. - * Changes to Samples to prevent tutorial issues. - * Incorporated suggested changes from public pull 216 (“Replace .. paths”). - * Removed servo glitches when robot is stopped. - * If user hits “Cancel” when editing a configuration file, clears the unsaved changes and reverts to the original unmodified configuration. - * Added log info to help diagnose why the Robot Controller app was terminated (for example, by watch dog function). - * Added ability to transfer log from the controller. - * Fixed inconsistency for AngularVelocity. - * Limit unbounded growth of data for telemetry.
If user does not call telemetry.update() for LinearOpMode in a timely manner, data added for telemetry might get lost if size limit is exceeded. - -## Version 2.35 (released on 16.10.06) - * Blockly programming mode - Removed unnecessary idle() call from blocks for new project. - -## Version 2.30 (released on 16.10.05) - * Blockly programming mode: - - Mechanism added to save Blockly OpModes from Programming Mode Server onto local device - - To avoid clutter, blocks are displayed in categorized folders - - Added support for DigitalChannel - - Added support for ModernRoboticsI2cCompassSensor - - Added support for ModernRoboticsI2cRangeSensor - - Added support for VoltageSensor - - Added support for AnalogInput - - Added support for AnalogOutput - - Fix for CompassSensor setMode block - * Vuforia - - Fix deadlock / make camera data available while Vuforia is running. - - Update to Vuforia 6.0.117 (recommended by Vuforia and Google to close security loophole). - * Fix for autonomous 30 second timer bug (where timer was in effect, even though it appeared to have timed out). - * opModeIsActive changes to allow cleanup after OpMode is stopped (with enforced 2 second safety timeout). - * Fix to avoid reading i2c twice. - * Updated sample OpModes. - * Improved logging and fixed intermittent freezing. - * Added digital I/O sample. - * Cleaned up device names in sample OpModes to be consistent with Pushbot guide. - * Fix to allow use of IrSeekerSensorV3. - -## Version 2.20 (released on 16.09.08) - * Support for Modern Robotics Compass Sensor. - * Support for Modern Robotics Range Sensor. - * Revise device names for Pushbot templates to match the names used in Pushbot guide. - * Fixed bug so that IrSeekerSensorV3 device is accessible as IrSeekerSensor in hardwareMap. - * Modified computer vision code to require an individual Vuforia license (per legal requirement from PTC). - * Minor fixes. - * Blockly enhancements: - - Support for Voltage Sensor. - - Support for Analog Input. - - Support for Analog Output. - - Support for Light Sensor. - - Support for Servo Controller. - -## Version 2.10 (released on 16.09.03) - * Support for Adafruit IMU. - * Improvements to ModernRoboticsI2cGyro class - - Block on reset of z axis. - - isCalibrating() returns true while gyro is calibrating. - * Updated sample gyro program. - * Blockly enhancements - - support for android.graphics.Color. - - added support for ElapsedTime. - - improved look and legibility of blocks. - - support for compass sensor. - - support for ultrasonic sensor. - - support for IrSeeker. - - support for LED. - - support for color sensor. - - support for CRServo. - - prompt user to configure robot before using programming mode. - * Provides ability to disable audio cues. - * various bug fixes and improvements. - -## Version 2.00 (released on 16.08.19) - * This is the new release for the upcoming 2016-2017 FIRST Tech Challenge Season. - * Channel change is enabled in the FTC Robot Controller app for Moto G 2nd and 3rd Gen phones. - * Users can now use annotations to register/disable their OpModes. - * Changes in the Android SDK, JDK and build tool requirements (minsdk=19, java 1.7, build tools 23.0.3). - * Standardized units in analog input. - * Cleaned up code for existing analog sensor classes. - * setChannelMode and getChannelMode were REMOVED from the DcMotorController class. This is important - we no longer set the motor modes through the motor controller. - * setMode and getMode were added to the DcMotor class.
- * ContinuousRotationServo class has been added to the FTC SDK. - * Range.clip() method has been overloaded so it can support this operation for int, short and byte integers. - * Some changes have been made (new methods added) on how a user can access items from the hardware map. - * Users can now set the zero power behavior for a DC motor so that the motor will brake or float when power is zero. - * Prototype Blockly Programming Mode has been added to FTC Robot Controller. Users can place the Robot Controller into this mode, and then use a device (such as a laptop) that has a Javascript-enabled browser to write Blockly-based OpModes directly onto the Robot Controller. - * Users can now configure the robot remotely through the FTC Driver Station app. - * Android Studio project supports Android Studio 2.1.x and compile SDK Version 23 (Marshmallow). - * Vuforia Computer Vision SDK integrated into FTC SDK. Users can use sample vision targets to get localization information on a standard FTC field. - * Project structure has been reorganized so that there is now a TeamCode package where users can place their local/custom OpModes. - * Inspection function has been integrated into the FTC Robot Controller and Driver Station Apps (Thanks Team HazMat… 9277 & 10650!). - * Audio cues have been incorporated into FTC SDK. - * Swap mechanism added to FTC Robot Controller configuration activity. For example, if you have two motor controllers on a robot, and you misidentified them in your configuration file, you can use the Swap button to swap the devices within the configuration file (so you do not have to manually re-enter in the configuration info for the two devices). - * Fix mechanism added to allow a user to replace an electronic module easily. For example, suppose a servo controller dies on your robot. You replace the broken module with a new module, which has a different serial number from the original servo controller. You can use the Fix button to automatically reconfigure your configuration file to use the serial number of the new module. - * Improvements made to the resiliency and responsiveness of the system. - * For LinearOpMode the user now must call telemetry.update() to update the telemetry data on the driver station. This update() mechanism ensures that the driver station gets the updated data properly and at the same time. - * The Auto Configure function of the Robot Controller is now template based. If there is a commonly used robot configuration, a template can be created so that the Auto Configure mechanism can be used to quickly configure a robot of this type. - * The logic to detect a runaway OpMode (both in the LinearOpMode and OpMode types) and to abort the run, then auto recover has been improved/implemented. - * Fix has been incorporated so that Logitech F310 gamepad mappings will be correct for Marshmallow users. - -## Release 16.07.08 - - * For the ftc_app project, the gradle files have been modified to support Android Studio 2.1.x. - -## Release 16.03.30 - - * For the MIT App Inventor, the design blocks have new icons that better represent the function of each design component. - * Some changes were made to the shutdown logic to ensure the robust shutdown of some of our USB services. - * A change was made to LinearOpMode so as to allow a given instance to be executed more than once, which is required for the App Inventor. - * Javadoc improved/updated. - -## Release 16.03.09 - - * Changes made to make the FTC SDK synchronous (significant change!)
- - waitOneFullHardwareCycle() and waitForNextHardwareCycle() are no longer needed and have been deprecated. - - runOpMode() (for a LinearOpMode) is now decoupled from the system's hardware read/write thread. - - loop() (for an OpMode) is now decoupled from the system's hardware read/write thread. - - Methods are synchronous. - - For example, if you call setMode(DcMotorController.RunMode.RESET_ENCODERS) for a motor, the encoder is guaranteed to be reset when the method call is complete. - - For legacy module (NXT compatible), user no longer has to toggle between read and write modes when reading from or writing to a legacy device. - * Changes made to enhance reliability/robustness during ESD event. - * Changes made to make code thread safe. - * Debug keystore added so that user-generated robot controller APKs will all use the same signed key (to avoid conflicts if a team has multiple developer laptops for example). - * Firmware version information for Modern Robotics modules is now logged. - * Changes made to improve USB comm reliability and robustness. - * Added support for voltage indicator for legacy (NXT-compatible) motor controllers. - * Changes made to provide auto stop capabilities for OpModes. - - A LinearOpMode class will stop when the statements in runOpMode() are complete. User does not have to push the stop button on the driver station. - - If an OpMode is stopped by the driver station, but there is a runaway/uninterruptible thread persisting, the app will log an error message then force itself to crash to stop the runaway thread. - * Driver Station UI modified to display lowest measured voltage below current voltage (12V battery). - * Driver Station UI modified to have color background for current voltage (green=good, yellow=caution, red=danger, extremely low voltage). - * javadoc improved (edits and additional classes). - * Added app build time to About activity for driver station and robot controller apps. - * Display local IP addresses on Driver Station About activity. - * Added I2cDeviceSynchImpl. - * Added I2cDeviceSync interface. - * Added seconds() and milliseconds() to ElapsedTime for clarity. - * Added getCallbackCount() to I2cDevice. - * Added missing clearI2cPortActionFlag. - * Added code to create log messages while waiting for LinearOpMode shutdown. - * Fix so Wi-Fi Direct Config activity will no longer launch multiple times. - * Added the ability to specify an alternate i2c address in software for the Modern Robotics gyro. - -## Release 16.02.09 - - * Improved battery checker feature so that voltage values get refreshed regularly (every 250 msec) on Driver Station (DS) user interface. - * Improved software so that Robot Controller (RC) is much more resilient and “self-healing” to USB disconnects: - - If user attempts to start/restart RC with one or more modules missing, it will display a warning but still start up. - - When running an OpMode, if one or more modules get disconnected, the RC & DS will display warnings, and the robot will keep on working in spite of the missing module(s). - - If a disconnected module gets physically reconnected, the RC will auto detect the module and the user will regain control of the recently connected module. - - Warning messages are more helpful (identifies the type of module that’s missing plus its USB serial number). - * Code changes to fix the null gamepad reference when users try to reference the gamepads in the init() portion of their OpMode. - * NXT light sensor output is now properly scaled.
Note that teams might have to readjust their light threshold values in their OpModes. - * On DS user interface, gamepad icon for a driver will disappear if the matching gamepad is disconnected or if that gamepad gets designated as a different driver. - * Robot Protocol (ROBOCOL) version number info is displayed in About screen on RC and DS apps. - * Incorporated a display filter on pairing screen to filter out devices that don’t use the “-“ format. This filter can be turned off to show all Wi-Fi Direct devices. - * Updated text in License file. - * Fixed formatting error in OpticalDistanceSensor.toString(). - * Fixed issue with a blank (“”) device name that would disrupt Wi-Fi Direct Pairing. - * Made a change so that the Wi-Fi info and battery info can be displayed more quickly on the DS upon connecting to RC. - * Improved javadoc generation. - * Modified code to make it easier to support language localization in the future. - -## Release 16.01.04 - - * Updated compileSdkVersion for apps - * Prevent Wi-Fi from entering power saving mode - * removed unused import from driver station - * Corrected "Dead zone" joystick code. - * LED.getDeviceName and .getConnectionInfo() return null - * apps check for ROBOCOL_VERSION mismatch - * Fix for "Telemetry also has off-by-one errors in its data string sizing / short size limitations" error - * User telemetry output is sorted. - * added formatting variants to DbgLog and RobotLog APIs - * code modified to allow for a long list of OpMode names. - * changes to improve thread safety of RobocolDatagramSocket - * Fix for "missing hardware leaves robot controller disconnected from driver station" error - * fix for "fast tapping of Init/Start causes problems" (toast is now only instantiated on UI thread). - * added some log statements for thread life cycle. - * moved gamepad reset logic inside of initActiveOpMode() for robustness - * changes made to mitigate risk of race conditions on public methods. - * changes to try and flag when Wi-Fi Direct name contains non-printable characters. - * fix to correct race condition between .run() and .close() in ReadWriteRunnableStandard. - * updated FTDI driver - * made ReadWriteRunnableStandard interface public. - * fixed off-by-one errors in Command constructor - * moved specific hardware implementations into their own package. - * moved specific gamepad implementations to the hardware library. - * changed LICENSE file to new BSD version. - * fixed race condition when shutting down Modern Robotics USB devices. - * methods in the ColorSensor classes have been synchronized. - * corrected isBusy() status to reflect end of motion. - * corrected "back" button keycode. - * the notSupported() method of the GyroSensor class was changed to protected (it should not be public). - -## Release 15.11.04.001 - - * Added support for Modern Robotics Gyro. - - The GyroSensor class now supports the MR Gyro Sensor. - - Users can access heading data (about Z axis). - - Users can also access raw gyro data (X, Y, & Z axes). - - Example MRGyroTest.java OpMode included. - * Improved error messages - - More descriptive error messages for exceptions in user code. - * Updated DcMotor API - * Enable read mode on new address in setI2cAddress - * Fix so that driver station app resets the gamepads when switching OpModes. - * USB-related code changes to make USB comm more responsive and to display more explicit error messages. - - Fix so that USB will recover properly if the USB bus returns garbage data. - - Fix USB initialization race condition.
- - Better error reporting during FTDI open. - - More explicit messages during USB failures. - - Fixed bug so that USB device is closed if event loop teardown method was not called. - * Fixed timer UI issue - * Fixed duplicate name UI bug (Legacy Module configuration). - * Fixed race condition in EventLoopManager. - * Fix to keep references stable when updating gamepad. - * For legacy Matrix motor/servo controllers removed necessity of appending "Motor" and "Servo" to controller names. - * Updated HT color sensor driver to use constants from ModernRoboticsUsbLegacyModule class. - * Updated MR color sensor driver to use constants from ModernRoboticsUsbDeviceInterfaceModule class. - * Correctly handle I2C Address change in all color sensors. - * Updated/cleaned up OpModes. - - Updated comments in LinearI2cAddressChange.java example OpMode. - - Replaced the calls to "setChannelMode" with "setMode" (to match the new name of the DcMotor method). - - Removed K9AutoTime.java OpMode. - - Added MRGyroTest.java OpMode (demonstrates how to use MR Gyro Sensor). - - Added MRRGBExample.java OpMode (demonstrates how to use MR Color Sensor). - - Added HTRGBExample.java OpMode (demonstrates how to use HT legacy color sensor). - - Added MatrixControllerDemo.java (demonstrates how to use legacy Matrix controller). - * Updated javadoc documentation. - * Updated release .apk files for Robot Controller and Driver Station apps. - -## Release 15.10.06.002 - - * Added support for Legacy Matrix 9.6V motor/servo controller. - * Cleaned up build.gradle file. - * Minor UI and bug fixes for driver station and robot controller apps. - * Throws error if Ultrasonic sensor (NXT) is not configured for legacy module port 4 or 5. - - -## Release 15.08.03.001 - - * New user interfaces for FTC Driver Station and FTC Robot Controller apps. - * An init() method is added to the OpMode class. - - For this release, init() is triggered right before the start() method. - - Eventually, the init() method will be triggered when the user presses an "INIT" button on driver station. - - The init() and loop() methods are now required (i.e., need to be overridden in the user's OpMode). - - The start() and stop() methods are optional. - * A new LinearOpMode class is introduced. - - Teams can use the LinearOpMode mode to create a linear (not event driven) program model. - - Teams can use blocking statements like Thread.sleep() within a linear OpMode. - * The API for the Legacy Module and Core Device Interface Module has been updated. - - Support for encoders with the Legacy Module is now working. - * The hardware loop has been updated for better performance. +### Feel Free to reach out on the [Official Pedro Pathing Discord Server](https://discord.gg/2GfC4qBP5s)! diff --git a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/LOCALIZATION.md b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/LOCALIZATION.md index 18d1206..1d431e3 100644 --- a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/LOCALIZATION.md +++ b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/LOCALIZATION.md @@ -4,10 +4,10 @@ the pose exponential method of localization. It's basically a way of turning mov robot's coordinate frame to the global coordinate frame. If you're interested in reading more about it, then check out pages 177 - 183 of [Controls Engineering in the FIRST Robotics Competition](https://file.tavsys.net/control/controls-engineering-in-frc.pdf) by Tyler Veness.
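To make the pose exponential idea above concrete, here is a compact sketch of the update it performs; the symbols below are chosen for illustration only, and the cited chapter has the full derivation and sign conventions. With (Δx, Δy, Δθ) the robot-frame displacement and heading change measured over one localizer update, θ the current field heading, and assuming the velocity is roughly constant over the update interval:

$$
\begin{pmatrix} \Delta x_{\text{field}} \\ \Delta y_{\text{field}} \end{pmatrix}
= R(\theta)
\begin{pmatrix}
\frac{\sin \Delta\theta}{\Delta\theta} & \frac{\cos \Delta\theta - 1}{\Delta\theta} \\
\frac{1 - \cos \Delta\theta}{\Delta\theta} & \frac{\sin \Delta\theta}{\Delta\theta}
\end{pmatrix}
\begin{pmatrix} \Delta x \\ \Delta y \end{pmatrix},
\qquad
\theta \leftarrow \theta + \Delta\theta
$$

where R(θ) is the rotation matrix for the current heading. As Δθ → 0 the middle matrix tends to the identity, and the update reduces to simply rotating the local displacement into the field frame.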
However, the OTOS localizer uses its own onboard system for calculating localization, -which I do not know about. +which we do not know about. ## Setting Your Localizer -Go to line `70` in the `PoseUpdater` class, and replace the `new ThreeWheelLocalizer(hardwareMap)` +Go to line `73` in the `PoseUpdater` class, and replace the `new ThreeWheelLocalizer(hardwareMap)` with the localizer that applies to you: * If you're using drive encoders, put `new DriveEncoderLocalizer(hardwareMap)` * If you're using two wheel odometry, put `new TwoWheelLocalizer(hardwareMap)` @@ -15,6 +15,7 @@ with the localizer that applies to you: don't change it from the default * If you're using three wheel odometry with the IMU, put `new ThreeWheelIMULocalizer(hardwareMap)` * If you're using OTOS, put `new OTOSLocalizer(hardwareMap)` +* If you're using Pinpoint, put `new PinpointLocalizer(hardwareMap)` ## Tuning To start, you'll want to select your localizer of choice. Below, I'll have instructions for the drive @@ -241,6 +242,23 @@ that applies to you and follow the directions there. robot around and see if the movements look accurate on FTC Dashboard. If they don't, then you'll want to re-run some of the previous steps. Otherwise, congrats on tuning your localizer! +## Pinpoint Localizer +* First you will need to plug the pinpoint into the I2C port. Ensure that the dead wheel encoder wires are + plugged into the proper ports on the pinpoint so that there are no errors within the tuning steps. +* Then, go to the `PinpointLocalizer.java` file and go to where it tells you to replace + the current statement with your pinpoint port in the constructor. Replace the `deviceName` parameter + with the name of the port that the pinpoint is connected to. +* Next, follow the instructions left by the TODO: comment and enter the odometry measurements either in + mm or inches (we have the conversion rates listed). +* Then, to ensure that your pinpoint is properly connected, please run the `SensorGoBildaPinpointExample.java` + file left in the `tuning` folder located within `localization`. +* Once completed, the localizer should be properly tuned. To test it out, you can go to + `Localization Test` and push around or drive around your robot. Go to [FTC Dashboard](http://192.168.43.1:8080/dash) + and on the top right, switch the drop down from the default view to the field view. Then, on the bottom + left corner, you should see a field and the robot being drawn on the field. You can then move your + robot around and see if the movements look accurate on FTC Dashboard. If they don't, then you'll + want to re-run some of the previous steps. Otherwise, congrats on tuning your localizer! + ## Using Road Runner's Localizer Of course, many teams have experience using Road Runner in the past and so have localizers from Road Runner that are tuned. There is an adapter for the Road Runner three wheel localizer to the Pedro diff --git a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/OVERVIEW.md b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/OVERVIEW.md index a0dbca1..ac88be4 100644 --- a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/OVERVIEW.md +++ b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/OVERVIEW.md @@ -19,7 +19,7 @@ Why use Pedro Pathing? Why not something else like Road Runner or Pure Pursuit? * Pure Pursuit searches for the farthest point on the path that's within a certain radius from the robot.
Pure Pursuit will then go in a straight line to that point. This poses several problems, as a small search radius will cause some oscillations on corners, and a large search radius will cut corners on paths, which makes the paths inaccurate to real life. * Pedro Pathing instead corrects to the closest point on the path while still following the path. This ensures that the follower will stay on the path while still being able to move forward along the path without cutting corners or encountering oscillation issues. * Why not Road Runner? - * Road Runner is a motion profile based follower, which means that a set of instructions for motor powers are calculated for each path beforehand and then run. After reaching the end of this motion profile, Road Runner corrects. This can be sufficient for most situations, but if the robot encounters an obstacle or wheel slippage, it may be unable to correct in time. + * Road Runner is a motion profile based follower, which means that a set of instructions for motor powers are calculated for each path beforehand and then run. During this motion profile, Road Runner can struggle to correct. This can be sufficient for many situations, but if the robot encounters an obstacle or wheel slippage, it may be unable to correct in time. * Pedro Pathing instead dynamically corrects throughout the path. The movement vectors are calculated at every point along the path, and because of this, the path can even be changed midway through and Pedro Pathing will still be able to correct. Since correction occurs throughout the path, the error correction isn't concentrated on the end of the path and therefore the robot is able to better minimize error. ## How Does Pedro Path? @@ -105,4 +105,5 @@ of the curvature formula, we can estimate a centripetal force correction and app control. ## Questions? -If you still have more questions, feel free to contact us at `scottsbots10158@gmail.com` \ No newline at end of file +If you still have more questions, feel free to contact us at `scottsbots10158@gmail.com` or +within our Discord, linked [here](https://discord.gg/2GfC4qBP5s). diff --git a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/TUNING.md b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/TUNING.md index c7fa481..cf39512 100644 --- a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/TUNING.md +++ b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/TUNING.md @@ -14,45 +14,56 @@ that the tuners require you to push the robot or the tuners output will say "inc measurements will be in centimeters. ## Tuning -* To start with, we need the mass of the robot in kg. This is used for the centripetal force correction, - and the mass, with the variable name `mass`, should be put on line `86` in the `FollowerConstants` +* First, make sure that your motor names and directions, located at the top of `FollowerConstants`, + are correct. + +* After that, we need the mass of the robot in kg. This is used for the centripetal force correction, + and the mass, with the variable name `mass`, should be put on line `92` in the `FollowerConstants` class under the `tuning` package. * Next, we need to find the preferred mecanum drive vectors. The rollers on mecanum wheels point at a 45 degree angle from the forward direction, but the actual direction the force is output is actually - closer to forward.
Before running any OpModes, make sure your motors are reversed properly in the + `Follower` class constructor. To find the direction your wheels will go, you will need to run the `Forward Velocity Tuner` and `Strafe Velocity Tuner` OpModes. These will run your robot at full power for 40 inches forward and to the right, respectively. The distance can be changed through FTC Dashboard under the dropdown for each respective class, but higher distances work better. After the distance has finished running, the end velocity will be output to telemetry. The robot may continue to drift a little bit after the robot has finished running the distance, so make sure you have - plenty of room. Once you're done, put the velocity for the `Forward Velocity Tuner` on line `33` in - the `FollowerConstants` class, and the velocity for the `Strafe Velocity Tuner` on line `34` in the + plenty of room. Once you're done, put the velocity for the `Forward Velocity Tuner` on line `39` in + the `FollowerConstants` class, and the velocity for the `Strafe Velocity Tuner` on line `40` in the `FollowerConstants` class. The variable names should be `xMovement` and `yMovement`, respectively. * The last set of automatic tuners you'll need to run are the zero power acceleration tuners. These find the rate at which your robot decelerates when power is cut from the drivetrain. This is used to get a more accurate estimation of the drive vector. To find this, you will need to run the `Forward Zero Power Acceleration Tuner` and the `Lateral Zero Power Acceleration Tuner` OpModes. - These will run your robot until it hits a velocity of 10 inches/second forward and to the right, + These will run your robot until it hits a velocity of 30 inches/second forward and to the right, respectively. The velocity can be changed through FTC Dashboard under the dropdown for each respective class, but higher velocities work better. After the velocity has been reached, power will be cut from the drivetrain and the robot's deceleration will be tracked until the robot stops, at which point it will display the deceleration in telemetry. This robot will need to drift to a stop to properly work, and the higher the velocity the greater the drift distance, so make sure you have enough room. Once you're done, put the zero power acceleration for the - `Forward Zero Power Acceleration Tuner` on line `94` in the `FollowerConstants` class and the zero - power acceleration for the `Lateral Zero Power Acceleration Tuner` on line `98` in the + `Forward Zero Power Acceleration Tuner` on line `100` in the `FollowerConstants` class and the zero + power acceleration for the `Lateral Zero Power Acceleration Tuner` on line `104` in the `FollowerConstants` class. The variable names should be `forwardZeroPowerAcceleration` and `lateralZeroPowerAcceleration`, respectively. * After this, we will want to tune the translational PID. Go to FTC Dashboard and disable all but - the `useTranslational` checkboxes under the `Follower` tab. Then, run `StraightBackAndForth`. Make - sure you disable the timer on autonomous OpModes. The PID for the translational error is called - `translationalPIDF`. If you need to add a feedforward value, use the `translationalPIDFFeedForward` - since that will add the feedforward in the direction the robot is trying to move, rather than the - feedforward in the PIDF itself, since those will only add the feedforward one way. You can change - the PIDF constants and feedforward values, under the `FollowerConstants` tab in FTC Dashboard. 
+ the `useTranslational` checkboxes under the `Follower` tab. Then, run `StraightBackAndForth`. + Make sure you disable the timer on autonomous OpModes. You will notice in telemetry a message saying + that the robot will travel a distance forward and backward; this will not happen until later, so for + now you can ignore this message. The robot should not move when you run the OpMode initially. Instead, + it should correct when you push it away from its starting position. Note that this correction should + happen regardless of the robot's rotation, and that the robot should not rotate itself (if it does, + disable `useHeading` as mentioned prior). Also note that the robot will only correct to an imaginary line + that runs straight forward from the robot's starting position, meaning that it will not correct in the + (original) forward direction. The PID for the translational error is called `translationalPIDF`. + If you need to add a feedforward value, use the `translationalPIDFFeedForward` since that will add + the feedforward in the direction the robot is trying to move, rather than the feedforward in the + PIDF itself, since those will only add the feedforward one way. You can change the PIDF constants + and feedforward values under the `FollowerConstants` tab in FTC Dashboard. To tune the PID, push the robot off the path and see how it corrects. You will want to alternate sides you push to reduce field wear and tear as well as push with varying power and distance. I would recommend tuning the PID so that it is capable of correcting while minimizing oscillations and still @@ -70,9 +81,8 @@ measurements will be in centimeters. `zeroPowerAccelerationMultiplier`. This determines how fast your robot will decelerate as a factor of how fast your robot will coast to a stop. Honestly, this is up to you. I personally used 4, but what works best for you is most important. Higher numbers will cause a faster brake, but increase - oscillations at the end. Lower numbers will do the opposite. This can be found on line `107` in - `FollowerConstants`, named `zeroPowerAccelerationMultiplier`. The drive PID is much, much more -* sensitive than the others. For reference, + oscillations at the end. Lower numbers will do the opposite. This can be found on line `113` in + `FollowerConstants`, named `zeroPowerAccelerationMultiplier`. The drive PID is much, much more sensitive than the others. For reference, my P values were in the hundredths and thousandths place values, and my D values were in the hundred thousandths and millionths place values. To tune this, enable `useDrive`, `useHeading`, and `useTranslational` in the `Follower` dropdown in FTC Dashboard. Next, run `StraightBackAndForth` @@ -97,7 +107,7 @@ measurements will be in centimeters. * Finally, we will want to tune the centripetal force correction. This is a pretty simple tune. Open up FTC Dashboard and enable everything under the `Follower` tab. Then, run `CurvedBackAndForth` and turn off its timer. If you notice the robot is correcting towards the inside of the curve - as/after running a path, then increase `centripetalScaling`, which can be found on line `89` of + as/after running a path, then increase `centripetalScaling`, which can be found on line `95` of `FollowerConstants`. If the robot is correcting towards the outside of the curve, then decrease `centripetalScaling`. @@ -110,11 +120,11 @@ measurements will be in centimeters.
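To keep track of where the tuner outputs above end up, here is a minimal sketch of the `FollowerConstants` fields this guide references. The class name, the numeric values, and the comments below are placeholders for illustration; only the field names are taken from the steps above, and the exact lines, types, and surrounding code in your copy of `FollowerConstants` will differ.

```java
// Sketch only: the FollowerConstants fields named in the tuning steps above.
// Every number here is a placeholder -- replace each with your own tuner output.
public class FollowerConstantsSketch {
    public static double mass = 10.5;                           // robot mass in kg, used by the centripetal correction

    public static double xMovement = 80.0;                      // end velocity from the Forward Velocity Tuner
    public static double yMovement = 65.0;                      // end velocity from the Strafe Velocity Tuner

    public static double forwardZeroPowerAcceleration = -35.0;  // from the Forward Zero Power Acceleration Tuner
    public static double lateralZeroPowerAcceleration = -80.0;  // from the Lateral Zero Power Acceleration Tuner
    public static double zeroPowerAccelerationMultiplier = 4.0; // braking aggressiveness used by the drive PID

    public static double centripetalScaling = 0.0005;           // raised or lowered while running CurvedBackAndForth
}
```

Treat this purely as a checklist of names to fill in; the real `FollowerConstants` class contains many more constants (motor names and directions, the PIDF coefficients, and the secondary-PID switches discussed below) than are shown here.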
## Note About the PIDs In versions of Pedro Pathing before early August 2024, there were 2 PIDs used in the translational, heading, and drive control. However, now there is only one main PID. The old system can still be used. -Scroll down to the bottom of `FollowerConstants` and set all the booleans from lines `157` to `159` +Scroll down to the bottom of `FollowerConstants` and set all the booleans from lines `163` to `165` to true. They should be named `useSecondaryTranslationalPID`, `useSecondaryHeadingPID`, and `useSecondaryDrivePID`. This will enable the two PID system that Pedro Pathing originally used. From there, scroll down and all the values pertaining to the secondary PIDs will be there. The two PID system works with a PID that handles larger errors (the main PID) and a second PID to handle smaller errors (the secondary PID). The main PID should be tuned to move the error within the secondary PID's range without providing too much momentum that could cause an overshoot. The secondary PID should be tuned -to correct within its range quickly and accurately while minimizing oscillations. \ No newline at end of file +to correct within its range quickly and accurately while minimizing oscillations. diff --git a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/follower/DriveVectorScaler.java b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/follower/DriveVectorScaler.java index f2cfcfd..a6ed238 100644 --- a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/follower/DriveVectorScaler.java +++ b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/follower/DriveVectorScaler.java @@ -15,6 +15,7 @@ import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector; public class DriveVectorScaler { // This is ordered left front, left back, right front, right back. These are also normalized. private Vector[] mecanumVectors; + private double maxPowerScaling = 1; /** * This creates a new DriveVectorScaler, which takes in various movement vectors and outputs @@ -52,9 +53,9 @@ public class DriveVectorScaler { */ public double[] getDrivePowers(Vector correctivePower, Vector headingPower, Vector pathingPower, double robotHeading) { // clamps down the magnitudes of the input vectors - if (correctivePower.getMagnitude() > 1) correctivePower.setMagnitude(1); - if (headingPower.getMagnitude() > 1) headingPower.setMagnitude(1); - if (pathingPower.getMagnitude() > 1) pathingPower.setMagnitude(1); + if (correctivePower.getMagnitude() > maxPowerScaling) correctivePower.setMagnitude(maxPowerScaling); + if (headingPower.getMagnitude() > maxPowerScaling) headingPower.setMagnitude(maxPowerScaling); + if (pathingPower.getMagnitude() > maxPowerScaling) pathingPower.setMagnitude(maxPowerScaling); // the powers for the wheel vectors double [] wheelPowers = new double[4]; @@ -65,8 +66,8 @@ public class DriveVectorScaler { // this contains the pathing vectors, one for each side (heading control requires 2) Vector[] truePathingVectors = new Vector[2]; - if (correctivePower.getMagnitude() == 1) { - // checks for corrective power equal to 1 in magnitude. if equal to one, then set pathing power to that + if (correctivePower.getMagnitude() == maxPowerScaling) { + // checks for corrective power equal to max power scaling in magnitude. 
if equal, then set pathing power to that truePathingVectors[0] = MathFunctions.copyVector(correctivePower); truePathingVectors[1] = MathFunctions.copyVector(correctivePower); } else { @@ -74,7 +75,7 @@ public class DriveVectorScaler { Vector leftSideVector = MathFunctions.subtractVectors(correctivePower, headingPower); Vector rightSideVector = MathFunctions.addVectors(correctivePower, headingPower); - if (leftSideVector.getMagnitude() > 1 || rightSideVector.getMagnitude() > 1) { + if (leftSideVector.getMagnitude() > maxPowerScaling || rightSideVector.getMagnitude() > maxPowerScaling) { //if the combined corrective and heading power is greater than 1, then scale down heading power double headingScalingFactor = Math.min(findNormalizingScaling(correctivePower, headingPower), findNormalizingScaling(correctivePower, MathFunctions.scalarMultiplyVector(headingPower, -1))); truePathingVectors[0] = MathFunctions.subtractVectors(correctivePower, MathFunctions.scalarMultiplyVector(headingPower, headingScalingFactor)); @@ -84,7 +85,7 @@ public class DriveVectorScaler { Vector leftSideVectorWithPathing = MathFunctions.addVectors(leftSideVector, pathingPower); Vector rightSideVectorWithPathing = MathFunctions.addVectors(rightSideVector, pathingPower); - if (leftSideVectorWithPathing.getMagnitude() > 1 || rightSideVectorWithPathing.getMagnitude() > 1) { + if (leftSideVectorWithPathing.getMagnitude() > maxPowerScaling || rightSideVectorWithPathing.getMagnitude() > maxPowerScaling) { // too much power now, so we scale down the pathing vector double pathingScalingFactor = Math.min(findNormalizingScaling(leftSideVector, pathingPower), findNormalizingScaling(rightSideVector, pathingPower)); truePathingVectors[0] = MathFunctions.addVectors(leftSideVector, MathFunctions.scalarMultiplyVector(pathingPower, pathingScalingFactor)); @@ -113,7 +114,7 @@ public class DriveVectorScaler { wheelPowers[3] = (mecanumVectorsCopy[2].getXComponent()*truePathingVectors[1].getYComponent() - truePathingVectors[1].getXComponent()*mecanumVectorsCopy[2].getYComponent()) / (mecanumVectorsCopy[2].getXComponent()*mecanumVectorsCopy[3].getYComponent() - mecanumVectorsCopy[3].getXComponent()*mecanumVectorsCopy[2].getYComponent()); double wheelPowerMax = Math.max(Math.max(Math.abs(wheelPowers[0]), Math.abs(wheelPowers[1])), Math.max(Math.abs(wheelPowers[2]), Math.abs(wheelPowers[3]))); - if (wheelPowerMax > 1) { + if (wheelPowerMax > maxPowerScaling) { wheelPowers[0] /= wheelPowerMax; wheelPowers[1] /= wheelPowerMax; wheelPowers[2] /= wheelPowerMax; @@ -126,12 +127,12 @@ public class DriveVectorScaler { /** * This takes in two Vectors, one static and one variable, and returns the scaling factor that, * when multiplied to the variable Vector, results in magnitude of the sum of the static Vector - * and the scaled variable Vector being 1. + * and the scaled variable Vector being the max power scaling. * * IMPORTANT NOTE: I did not intend for this to be used for anything other than the method above - * this one in this class, so there will be errors if you input Vectors of length greater than 1, + * this one in this class, so there will be errors if you input Vectors of length greater than maxPowerScaling, * and it will scale up the variable Vector if the magnitude of the sum of the two input Vectors - * isn't greater than 1. So, just don't use this elsewhere. There's gotta be a better way to do + * isn't greater than maxPowerScaling. So, just don't use this elsewhere. There's gotta be a better way to do * whatever you're trying to do. 
* * I know that this is used outside of this class, however, I created this method so I get to @@ -140,13 +141,32 @@ public class DriveVectorScaler { * * @param staticVector the Vector that is held constant. * @param variableVector the Vector getting scaled to make the sum of the input Vectors have a - * magnitude of 1. + * magnitude of maxPowerScaling. * @return returns the scaling factor for the variable Vector. */ public double findNormalizingScaling(Vector staticVector, Vector variableVector) { double a = Math.pow(variableVector.getXComponent(), 2) + Math.pow(variableVector.getYComponent(), 2); double b = staticVector.getXComponent() * variableVector.getXComponent() + staticVector.getYComponent() * variableVector.getYComponent(); - double c = Math.pow(staticVector.getXComponent(), 2) + Math.pow(staticVector.getYComponent(), 2) - 1.0; + double c = Math.pow(staticVector.getXComponent(), 2) + Math.pow(staticVector.getYComponent(), 2) - Math.pow(maxPowerScaling, 2); return (-b + Math.sqrt(Math.pow(b, 2) - a*c))/(a); + + } + + /** + * Sets the maximum power that can be used by the drive vector scaler. Clamped between 0 and 1. + * + * @param maxPowerScaling setting the max power scaling + */ + public void setMaxPowerScaling(double maxPowerScaling) { + this.maxPowerScaling = MathFunctions.clamp(maxPowerScaling, 0, 1); + } + + /** + * Gets the maximum power that can be used by the drive vector scaler. Ranges between 0 and 1. + * + * @return returns the max power scaling + */ + public double getMaxPowerScaling() { + return maxPowerScaling; } } diff --git a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/follower/Follower.java b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/follower/Follower.java index 649c126..2b48991 100644 --- a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/follower/Follower.java +++ b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/follower/Follower.java @@ -1,20 +1,23 @@ package org.firstinspires.ftc.teamcode.pedroPathing.follower; -import static org.firstinspires.ftc.teamcode.PedroConstants.*; +import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.drivePIDFFeedForward; import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.drivePIDFSwitch; import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.forwardZeroPowerAcceleration; -import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.headingPIDFSwitch; -import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.drivePIDFFeedForward; import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.headingPIDFFeedForward; +import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.headingPIDFSwitch; +import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.lateralZeroPowerAcceleration; import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftFrontMotorName; import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftRearMotorName; import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightFrontMotorName; import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightRearMotorName; -import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.translationalPIDFFeedForward; -import static 
org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.lateralZeroPowerAcceleration; +import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftFrontMotorDirection; +import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftRearMotorDirection; +import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightFrontMotorDirection; +import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightRearMotorDirection; import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.secondaryDrivePIDFFeedForward; import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.secondaryHeadingPIDFFeedForward; import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.secondaryTranslationalPIDFFeedForward; +import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.translationalPIDFFeedForward; import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.translationalPIDFSwitch; import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.useSecondaryDrivePID; import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.useSecondaryHeadingPID; @@ -24,7 +27,6 @@ import com.acmerobotics.dashboard.config.Config; import com.acmerobotics.dashboard.telemetry.MultipleTelemetry; import com.qualcomm.robotcore.hardware.DcMotor; import com.qualcomm.robotcore.hardware.DcMotorEx; -import com.qualcomm.robotcore.hardware.DcMotorSimple; import com.qualcomm.robotcore.hardware.HardwareMap; import com.qualcomm.robotcore.hardware.configuration.typecontainers.MotorConfigurationType; @@ -94,7 +96,6 @@ public class Follower { private boolean holdPositionAtEnd; private boolean teleopDrive; - private double maxPower = 1; private double previousSecondaryTranslationalIntegral; private double previousTranslationalIntegral; private double holdPointTranslationalScaling = FollowerConstants.holdPointTranslationalScaling; @@ -153,6 +154,16 @@ public class Follower { initialize(); } + /** + * This creates a new Follower given a HardwareMap and a localizer. + * @param hardwareMap HardwareMap required + * @param localizer the localizer you wish to use + */ + public Follower(HardwareMap hardwareMap, Localizer localizer) { + this.hardwareMap = hardwareMap; + initialize(localizer); + } + /** * This initializes the follower. * In this, the DriveVectorScaler and PoseUpdater is instantiated, the drive motors are @@ -197,18 +208,7 @@ public class Follower { * @param set This caps the motor power from [0, 1]. */ public void setMaxPower(double set) { - maxPower = MathFunctions.clamp(set, 0, 1); - } - - /** - * This handles the limiting of the drive powers array to the max power. - */ - public void limitDrivePowers() { - for (int i = 0; i < drivePowers.length; i++) { - if (Math.abs(drivePowers[i]) > maxPower) { - drivePowers[i] = maxPower * MathFunctions.getSign(drivePowers[i]); - } - } + driveVectorScaler.setMaxPowerScaling(set); } /** @@ -368,6 +368,25 @@ public class Follower { closestPose = currentPath.getClosestPoint(poseUpdater.getPose(), 1); } + /** + * This holds a Point. + * + * @param point the Point to stay at. + * @param heading the heading to face. + */ + public void holdPoint(Point point, double heading) { + holdPoint(new BezierPoint(point), heading); + } + + /** + * This holds a Point. + * + * @param pose the Point (as a Pose) to stay at. 
+ */ + public void holdPoint(Pose pose) { + holdPoint(new Point(pose), pose.getHeading()); + } + /** * This follows a Path. * This also makes the Follower hold the last Point on the Path. @@ -429,15 +448,22 @@ public class Follower { } /** - * This calls an update to the PoseUpdater, which updates the robot's current position estimate. - * This also updates all the Follower's PIDFs, which updates the motor powers. + * Calls an update to the PoseUpdater, which updates the robot's current position estimate. */ - public void update() { + public void updatePose() { poseUpdater.update(); if (drawOnDashboard) { dashboardPoseTracker.update(); } + } + + /** + * This calls an update to the PoseUpdater, which updates the robot's current position estimate. + * This also updates all the Follower's PIDFs, which updates the motor powers. + */ + public void update() { + updatePose(); if (!teleopDrive) { if (currentPath != null) { @@ -446,8 +472,6 @@ public class Follower { drivePowers = driveVectorScaler.getDrivePowers(MathFunctions.scalarMultiplyVector(getTranslationalCorrection(), holdPointTranslationalScaling), MathFunctions.scalarMultiplyVector(getHeadingVector(), holdPointHeadingScaling), new Vector(), poseUpdater.getPose().getHeading()); - limitDrivePowers(); - for (int i = 0; i < motors.size(); i++) { motors.get(i).setPower(drivePowers[i]); } @@ -459,8 +483,6 @@ public class Follower { drivePowers = driveVectorScaler.getDrivePowers(getCorrectiveVector(), getHeadingVector(), getDriveVector(), poseUpdater.getPose().getHeading()); - limitDrivePowers(); - for (int i = 0; i < motors.size(); i++) { motors.get(i).setPower(drivePowers[i]); } @@ -503,8 +525,6 @@ public class Follower { drivePowers = driveVectorScaler.getDrivePowers(getCentripetalForceCorrection(), teleopHeadingVector, teleopDriveVector, poseUpdater.getPose().getHeading()); - limitDrivePowers(); - for (int i = 0; i < motors.size(); i++) { motors.get(i).setPower(drivePowers[i]); } @@ -518,7 +538,7 @@ public class Follower { * movement, this is the x-axis. * @param lateralDrive determines the lateral drive vector for the robot in teleop. In field centric * movement, this is the y-axis. - * @param heading determines the heading vector for the robot in teleop. + * @param heading determines the heading vector for the robot in teleop. */ public void setTeleOpMovementVectors(double forwardDrive, double lateralDrive, double heading) { setTeleOpMovementVectors(forwardDrive, lateralDrive, heading, true); @@ -531,7 +551,7 @@ public class Follower { * movement, this is the x-axis. * @param lateralDrive determines the lateral drive vector for the robot in teleop. In field centric * movement, this is the y-axis. - * @param heading determines the heading vector for the robot in teleop. + * @param heading determines the heading vector for the robot in teleop. 
* @param robotCentric sets if the movement will be field or robot centric */ public void setTeleOpMovementVectors(double forwardDrive, double lateralDrive, double heading, boolean robotCentric) { @@ -670,19 +690,19 @@ public class Follower { public Vector getDriveVector() { if (!useDrive) return new Vector(); if (followingPathChain && chainIndex < currentPathChain.size() - 1) { - return new Vector(1, currentPath.getClosestPointTangentVector().getTheta()); + return new Vector(driveVectorScaler.getMaxPowerScaling(), currentPath.getClosestPointTangentVector().getTheta()); } driveError = getDriveVelocityError(); if (Math.abs(driveError) < drivePIDFSwitch && useSecondaryDrivePID) { secondaryDrivePIDF.updateError(driveError); - driveVector = new Vector(MathFunctions.clamp(secondaryDrivePIDF.runPIDF() + secondaryDrivePIDFFeedForward * MathFunctions.getSign(driveError), -1, 1), currentPath.getClosestPointTangentVector().getTheta()); + driveVector = new Vector(MathFunctions.clamp(secondaryDrivePIDF.runPIDF() + secondaryDrivePIDFFeedForward * MathFunctions.getSign(driveError), -driveVectorScaler.getMaxPowerScaling(), driveVectorScaler.getMaxPowerScaling()), currentPath.getClosestPointTangentVector().getTheta()); return MathFunctions.copyVector(driveVector); } drivePIDF.updateError(driveError); - driveVector = new Vector(MathFunctions.clamp(drivePIDF.runPIDF() + drivePIDFFeedForward * MathFunctions.getSign(driveError), -1, 1), currentPath.getClosestPointTangentVector().getTheta()); + driveVector = new Vector(MathFunctions.clamp(drivePIDF.runPIDF() + drivePIDFFeedForward * MathFunctions.getSign(driveError), -driveVectorScaler.getMaxPowerScaling(), driveVectorScaler.getMaxPowerScaling()), currentPath.getClosestPointTangentVector().getTheta()); return MathFunctions.copyVector(driveVector); } @@ -722,7 +742,7 @@ public class Follower { Vector velocityErrorVector = MathFunctions.addVectors(forwardVelocityError, lateralVelocityError); previousRawDriveError = rawDriveError; - rawDriveError = velocityErrorVector.getMagnitude() * MathFunctions.getSign(MathFunctions.dotProduct(velocityErrorVector, currentPath.getClosestPointTangentVector())); + rawDriveError = velocityErrorVector.getMagnitude() * MathFunctions.getSign(MathFunctions.dotProduct(velocityErrorVector, currentPath.getClosestPointTangentVector())); double projection = 2 * driveErrors[1] - driveErrors[0]; @@ -751,11 +771,11 @@ public class Follower { headingError = MathFunctions.getTurnDirection(poseUpdater.getPose().getHeading(), currentPath.getClosestPointHeadingGoal()) * MathFunctions.getSmallestAngleDifference(poseUpdater.getPose().getHeading(), currentPath.getClosestPointHeadingGoal()); if (Math.abs(headingError) < headingPIDFSwitch && useSecondaryHeadingPID) { secondaryHeadingPIDF.updateError(headingError); - headingVector = new Vector(MathFunctions.clamp(secondaryHeadingPIDF.runPIDF() + secondaryHeadingPIDFFeedForward * MathFunctions.getTurnDirection(poseUpdater.getPose().getHeading(), currentPath.getClosestPointHeadingGoal()), -1, 1), poseUpdater.getPose().getHeading()); + headingVector = new Vector(MathFunctions.clamp(secondaryHeadingPIDF.runPIDF() + secondaryHeadingPIDFFeedForward * MathFunctions.getTurnDirection(poseUpdater.getPose().getHeading(), currentPath.getClosestPointHeadingGoal()), -driveVectorScaler.getMaxPowerScaling(), driveVectorScaler.getMaxPowerScaling()), poseUpdater.getPose().getHeading()); return MathFunctions.copyVector(headingVector); } headingPIDF.updateError(headingError); - headingVector = new 
Vector(MathFunctions.clamp(headingPIDF.runPIDF() + headingPIDFFeedForward * MathFunctions.getTurnDirection(poseUpdater.getPose().getHeading(), currentPath.getClosestPointHeadingGoal()), -1, 1), poseUpdater.getPose().getHeading()); + headingVector = new Vector(MathFunctions.clamp(headingPIDF.runPIDF() + headingPIDFFeedForward * MathFunctions.getTurnDirection(poseUpdater.getPose().getHeading(), currentPath.getClosestPointHeadingGoal()), -driveVectorScaler.getMaxPowerScaling(), driveVectorScaler.getMaxPowerScaling()), poseUpdater.getPose().getHeading()); return MathFunctions.copyVector(headingVector); } @@ -772,7 +792,7 @@ public class Follower { Vector translational = getTranslationalCorrection(); Vector corrective = MathFunctions.addVectors(centripetal, translational); - if (corrective.getMagnitude() > 1) { + if (corrective.getMagnitude() > driveVectorScaler.getMaxPowerScaling()) { return MathFunctions.addVectors(centripetal, MathFunctions.scalarMultiplyVector(translational, driveVectorScaler.findNormalizingScaling(centripetal, translational))); } @@ -821,7 +841,7 @@ public class Follower { translationalVector = MathFunctions.addVectors(translationalVector, translationalIntegralVector); } - translationalVector.setMagnitude(MathFunctions.clamp(translationalVector.getMagnitude(), 0, 1)); + translationalVector.setMagnitude(MathFunctions.clamp(translationalVector.getMagnitude(), 0, driveVectorScaler.getMaxPowerScaling())); this.translationalVector = MathFunctions.copyVector(translationalVector); @@ -860,7 +880,7 @@ public class Follower { curvature = (yDoublePrime) / (Math.pow(Math.sqrt(1 + Math.pow(yPrime, 2)), 3)); } if (Double.isNaN(curvature)) return new Vector(); - centripetalVector = new Vector(MathFunctions.clamp(FollowerConstants.centripetalScaling * FollowerConstants.mass * Math.pow(MathFunctions.dotProduct(poseUpdater.getVelocity(), MathFunctions.normalizeVector(currentPath.getClosestPointTangentVector())), 2) * curvature, -1, 1), currentPath.getClosestPointTangentVector().getTheta() + Math.PI / 2 * MathFunctions.getSign(currentPath.getClosestPointNormalVector().getTheta())); + centripetalVector = new Vector(MathFunctions.clamp(FollowerConstants.centripetalScaling * FollowerConstants.mass * Math.pow(MathFunctions.dotProduct(poseUpdater.getVelocity(), MathFunctions.normalizeVector(currentPath.getClosestPointTangentVector())), 2) * curvature, -driveVectorScaler.getMaxPowerScaling(), driveVectorScaler.getMaxPowerScaling()), currentPath.getClosestPointTangentVector().getTheta() + Math.PI / 2 * MathFunctions.getSign(currentPath.getClosestPointNormalVector().getTheta())); return centripetalVector; } @@ -996,7 +1016,7 @@ public class Follower { /** * This resets the IMU, if applicable. 
*/ - public void resetIMU() { + private void resetIMU() throws InterruptedException { poseUpdater.resetIMU(); } } diff --git a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/GoBildaPinpointDriver.java b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/GoBildaPinpointDriver.java new file mode 100644 index 0000000..882ed56 --- /dev/null +++ b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/GoBildaPinpointDriver.java @@ -0,0 +1,520 @@ + +/* MIT License + * Copyright (c) [2024] [Base 10 Assets, LLC] + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package org.firstinspires.ftc.teamcode.pedroPathing.localization; + +import static com.qualcomm.robotcore.util.TypeConversion.byteArrayToInt; + +import com.qualcomm.hardware.lynx.LynxI2cDeviceSynch; +import com.qualcomm.robotcore.hardware.I2cAddr; +import com.qualcomm.robotcore.hardware.I2cDeviceSynchDevice; +import com.qualcomm.robotcore.hardware.I2cDeviceSynchSimple; +import com.qualcomm.robotcore.hardware.configuration.annotations.DeviceProperties; +import com.qualcomm.robotcore.hardware.configuration.annotations.I2cDeviceType; +import com.qualcomm.robotcore.util.TypeConversion; + +import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit; +import org.firstinspires.ftc.robotcore.external.navigation.DistanceUnit; +import org.firstinspires.ftc.robotcore.external.navigation.Pose2D; + +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.util.Arrays; + + +@I2cDeviceType +@DeviceProperties( + name = "goBILDA® Pinpoint Odometry Computer", + xmlTag = "goBILDAPinpoint", + description ="goBILDA® Pinpoint Odometry Computer (IMU Sensor Fusion for 2 Wheel Odometry)" +) + +public class GoBildaPinpointDriver extends I2cDeviceSynchDevice { + + private int deviceStatus = 0; + private int loopTime = 0; + private int xEncoderValue = 0; + private int yEncoderValue = 0; + private float xPosition = 0; + private float yPosition = 0; + private float hOrientation = 0; + private float xVelocity = 0; + private float yVelocity = 0; + private float hVelocity = 0; + + private static final float goBILDA_SWINGARM_POD = 13.26291192f; //ticks-per-mm for the goBILDA Swingarm Pod + private static final float goBILDA_4_BAR_POD = 19.89436789f; //ticks-per-mm for the goBILDA 4-Bar Pod + + //i2c address of the device + public static final byte DEFAULT_ADDRESS = 0x31; + + public GoBildaPinpointDriver(I2cDeviceSynchSimple deviceClient, 
boolean deviceClientIsOwned) { + super(deviceClient, deviceClientIsOwned); + + this.deviceClient.setI2cAddress(I2cAddr.create7bit(DEFAULT_ADDRESS)); + super.registerArmingStateCallback(false); + } + + + @Override + public Manufacturer getManufacturer() { + return Manufacturer.Other; + } + + @Override + protected synchronized boolean doInitialize() { + ((LynxI2cDeviceSynch)(deviceClient)).setBusSpeed(LynxI2cDeviceSynch.BusSpeed.FAST_400K); + return true; + } + + @Override + public String getDeviceName() { + return "goBILDA® Pinpoint Odometry Computer"; + } + + + //Register map of the i2c device + private enum Register { + DEVICE_ID (1), + DEVICE_VERSION (2), + DEVICE_STATUS (3), + DEVICE_CONTROL (4), + LOOP_TIME (5), + X_ENCODER_VALUE (6), + Y_ENCODER_VALUE (7), + X_POSITION (8), + Y_POSITION (9), + H_ORIENTATION (10), + X_VELOCITY (11), + Y_VELOCITY (12), + H_VELOCITY (13), + MM_PER_TICK (14), + X_POD_OFFSET (15), + Y_POD_OFFSET (16), + YAW_SCALAR (17), + BULK_READ (18); + + private final int bVal; + + Register(int bVal){ + this.bVal = bVal; + } + } + + //Device Status enum that captures the current fault condition of the device + public enum DeviceStatus{ + NOT_READY (0), + READY (1), + CALIBRATING (1 << 1), + FAULT_X_POD_NOT_DETECTED (1 << 2), + FAULT_Y_POD_NOT_DETECTED (1 << 3), + FAULT_NO_PODS_DETECTED (1 << 2 | 1 << 3), + FAULT_IMU_RUNAWAY (1 << 4); + + private final int status; + + DeviceStatus(int status){ + this.status = status; + } + } + + //enum that captures the direction the encoders are set to + public enum EncoderDirection{ + FORWARD, + REVERSED; + } + + //enum that captures the kind of goBILDA odometry pods, if goBILDA pods are used + public enum GoBildaOdometryPods { + goBILDA_SWINGARM_POD, + goBILDA_4_BAR_POD; + } + //enum that captures a limited scope of read data. 
More options may be added in future update + public enum readData { + ONLY_UPDATE_HEADING, + } + + + /** Writes an int to the i2c device + @param reg the register to write the int to + @param i the integer to write to the register + */ + private void writeInt(final Register reg, int i){ + deviceClient.write(reg.bVal, TypeConversion.intToByteArray(i,ByteOrder.LITTLE_ENDIAN)); + } + + /** + * Reads an int from a register of the i2c device + * @param reg the register to read from + * @return returns an int that contains the value stored in the read register + */ + private int readInt(Register reg){ + return byteArrayToInt(deviceClient.read(reg.bVal,4), ByteOrder.LITTLE_ENDIAN); + } + + /** + * Converts a byte array to a float value + * @param byteArray byte array to transform + * @param byteOrder order of byte array to convert + * @return the float value stored by the byte array + */ + private float byteArrayToFloat(byte[] byteArray, ByteOrder byteOrder){ + return ByteBuffer.wrap(byteArray).order(byteOrder).getFloat(); + } + /** + * Reads a float from a register + * @param reg the register to read + * @return the float value stored in that register + */ + + private float readFloat(Register reg){ + return byteArrayToFloat(deviceClient.read(reg.bVal,4),ByteOrder.LITTLE_ENDIAN); + } + + + /** + * Converts a float to a byte array + * @param value the float array to convert + * @return the byte array converted from the float + */ + private byte [] floatToByteArray (float value, ByteOrder byteOrder) { + return ByteBuffer.allocate(4).order(byteOrder).putFloat(value).array(); + } + + /** + * Writes a byte array to a register on the i2c device + * @param reg the register to write to + * @param bytes the byte array to write + */ + private void writeByteArray (Register reg, byte[] bytes){ + deviceClient.write(reg.bVal,bytes); + } + + /** + * Writes a float to a register on the i2c device + * @param reg the register to write to + * @param f the float to write + */ + private void writeFloat (Register reg, float f){ + byte[] bytes = ByteBuffer.allocate(4).order(ByteOrder.LITTLE_ENDIAN).putFloat(f).array(); + deviceClient.write(reg.bVal,bytes); + } + + /** + * Looks up the DeviceStatus enum corresponding with an int value + * @param s int to lookup + * @return the Odometry Computer state + */ + private DeviceStatus lookupStatus (int s){ + if ((s & DeviceStatus.CALIBRATING.status) != 0){ + return DeviceStatus.CALIBRATING; + } + boolean xPodDetected = (s & DeviceStatus.FAULT_X_POD_NOT_DETECTED.status) == 0; + boolean yPodDetected = (s & DeviceStatus.FAULT_Y_POD_NOT_DETECTED.status) == 0; + + if(!xPodDetected && !yPodDetected){ + return DeviceStatus.FAULT_NO_PODS_DETECTED; + } + if (!xPodDetected){ + return DeviceStatus.FAULT_X_POD_NOT_DETECTED; + } + if (!yPodDetected){ + return DeviceStatus.FAULT_Y_POD_NOT_DETECTED; + } + if ((s & DeviceStatus.FAULT_IMU_RUNAWAY.status) != 0){ + return DeviceStatus.FAULT_IMU_RUNAWAY; + } + if ((s & DeviceStatus.READY.status) != 0){ + return DeviceStatus.READY; + } + else { + return DeviceStatus.NOT_READY; + } + } + + /** + * Call this once per loop to read new data from the Odometry Computer. Data will only update once this is called. 
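+ * <p>A minimal usage sketch (illustrative only; it assumes a LinearOpMode context and a device
+ * configured as "pinpoint" in the robot configuration):
+ * <pre>{@code
+ * GoBildaPinpointDriver odo = hardwareMap.get(GoBildaPinpointDriver.class, "pinpoint");
+ * while (opModeIsActive()) {
+ *     odo.update();                          // refresh all cached values once per loop
+ *     double xMm = odo.getPosX();            // estimated X (forward) position, in mm
+ *     double yMm = odo.getPosY();            // estimated Y (strafe) position, in mm
+ *     double headingRad = odo.getHeading();  // estimated heading, in radians
+ * }
+ * }</pre>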
+ */ + public void update(){ + byte[] bArr = deviceClient.read(Register.BULK_READ.bVal, 40); + deviceStatus = byteArrayToInt(Arrays.copyOfRange (bArr, 0, 4), ByteOrder.LITTLE_ENDIAN); + loopTime = byteArrayToInt(Arrays.copyOfRange (bArr, 4, 8), ByteOrder.LITTLE_ENDIAN); + xEncoderValue = byteArrayToInt(Arrays.copyOfRange (bArr, 8, 12), ByteOrder.LITTLE_ENDIAN); + yEncoderValue = byteArrayToInt(Arrays.copyOfRange (bArr, 12,16), ByteOrder.LITTLE_ENDIAN); + xPosition = byteArrayToFloat(Arrays.copyOfRange(bArr, 16,20), ByteOrder.LITTLE_ENDIAN); + yPosition = byteArrayToFloat(Arrays.copyOfRange(bArr, 20,24), ByteOrder.LITTLE_ENDIAN); + hOrientation = byteArrayToFloat(Arrays.copyOfRange(bArr, 24,28), ByteOrder.LITTLE_ENDIAN); + xVelocity = byteArrayToFloat(Arrays.copyOfRange(bArr, 28,32), ByteOrder.LITTLE_ENDIAN); + yVelocity = byteArrayToFloat(Arrays.copyOfRange(bArr, 32,36), ByteOrder.LITTLE_ENDIAN); + hVelocity = byteArrayToFloat(Arrays.copyOfRange(bArr, 36,40), ByteOrder.LITTLE_ENDIAN); + } + + /** + * Call this once per loop to read new data from the Odometry Computer. This is an override of the update() function + * which allows a narrower range of data to be read from the device for faster read times. Currently ONLY_UPDATE_HEADING + * is supported. + * @param data GoBildaPinpointDriver.readData.ONLY_UPDATE_HEADING + */ + public void update(readData data) { + if (data == readData.ONLY_UPDATE_HEADING) { + hOrientation = byteArrayToFloat(deviceClient.read(Register.H_ORIENTATION.bVal, 4), ByteOrder.LITTLE_ENDIAN); + } + } + + /** + * Sets the odometry pod positions relative to the point that the odometry computer tracks around.
+ *
+ * The most common tracking position is the center of the robot.
+ *
+ * The X pod offset refers to how far sideways (in mm) from the tracking point the X (forward) odometry pod is. Left of the center is a positive number, right of center is a negative number.
+ * The Y pod offset refers to how far forward (in mm) from the tracking point the Y (strafe) odometry pod is. Forward of center is a positive number, backwards is a negative number.
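+ * <p>For example (illustrative numbers only, where {@code odo} is an instance of this driver),
+ * a robot whose X pod sits 84 mm to the left of the tracking point and whose Y pod sits
+ * 168 mm forward of it would be configured as:
+ * <pre>{@code
+ * odo.setOffsets(84.0, 168.0); // X pod 84 mm left (positive), Y pod 168 mm forward (positive)
+ * }</pre>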
+ * @param xOffset how sideways from the center of the robot is the X (forward) pod? Left increases + * @param yOffset how far forward from the center of the robot is the Y (Strafe) pod? forward increases + */ + public void setOffsets(double xOffset, double yOffset){ + writeFloat(Register.X_POD_OFFSET, (float) xOffset); + writeFloat(Register.Y_POD_OFFSET, (float) yOffset); + } + + /** + * Recalibrates the Odometry Computer's internal IMU.
+ *
+ * Robot MUST be stationary.
+ *
+ * Device takes a large number of samples, and uses those as the gyroscope zero-offset. This takes approximately 0.25 seconds. + */ + public void recalibrateIMU(){writeInt(Register.DEVICE_CONTROL,1<<0);} + + /** + * Resets the current position to 0,0,0 and recalibrates the Odometry Computer's internal IMU.
+ *
+ * Robot MUST be stationary.
+ *
+ * Device takes a large number of samples, and uses those as the gyroscope zero-offset. This takes approximately 0.25 seconds. + */ + public void resetPosAndIMU(){writeInt(Register.DEVICE_CONTROL,1<<1);} + + /** + * Can reverse the direction of each encoder. + * @param xEncoder FORWARD or REVERSED, X (forward) pod should increase when the robot is moving forward + * @param yEncoder FORWARD or REVERSED, Y (strafe) pod should increase when the robot is moving left + */ + public void setEncoderDirections(EncoderDirection xEncoder, EncoderDirection yEncoder){ + if (xEncoder == EncoderDirection.FORWARD){ + writeInt(Register.DEVICE_CONTROL,1<<5); + } + if (xEncoder == EncoderDirection.REVERSED) { + writeInt(Register.DEVICE_CONTROL,1<<4); + } + + if (yEncoder == EncoderDirection.FORWARD){ + writeInt(Register.DEVICE_CONTROL,1<<3); + } + if (yEncoder == EncoderDirection.REVERSED){ + writeInt(Register.DEVICE_CONTROL,1<<2); + } + } + + /** + * If you're using goBILDA odometry pods, the ticks-per-mm values are stored here for easy access.
+ *
+ * @param pods goBILDA_SWINGARM_POD or goBILDA_4_BAR_POD + */ + public void setEncoderResolution(GoBildaOdometryPods pods){ + if (pods == GoBildaOdometryPods.goBILDA_SWINGARM_POD) { + writeByteArray(Register.MM_PER_TICK, (floatToByteArray(goBILDA_SWINGARM_POD, ByteOrder.LITTLE_ENDIAN))); + } + if (pods == GoBildaOdometryPods.goBILDA_4_BAR_POD){ + writeByteArray(Register.MM_PER_TICK,(floatToByteArray(goBILDA_4_BAR_POD, ByteOrder.LITTLE_ENDIAN))); + } + } + + /** + * Sets the encoder resolution in ticks per mm of the odometry pods.
+ * You can find this number by dividing the counts-per-revolution of your encoder by the circumference of the wheel. + * @param ticks_per_mm should be somewhere between 10 ticks/mm and 100 ticks/mm a goBILDA Swingarm pod is ~13.26291192 + */ + public void setEncoderResolution(double ticks_per_mm){ + writeByteArray(Register.MM_PER_TICK,(floatToByteArray((float) ticks_per_mm,ByteOrder.LITTLE_ENDIAN))); + } + + /** + * Tuning this value should be unnecessary.
+ * The goBILDA Odometry Computer has a per-device tuned yaw offset already applied when you receive it.
+ *
+ * This is a scalar that is applied to the gyro's yaw value. Increasing it means the device will report more than one degree for every degree the sensor fusion algorithm measures.
+ *
+ * You can tune this variable by rotating the robot a large amount (10 full turns is a good starting place) and comparing the amount that the robot rotated to the amount measured. + * Rotating the robot exactly 10 times should measure 3600°. If it measures more or less, divide the moved amount by the measured amount and apply that value as the yaw scalar.
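+ * <p>For example (illustrative numbers, where {@code odo} is an instance of this driver), if 10 full
+ * rotations (3600°) are reported as 3627°, the scalar would be 3600.0 / 3627.0 ≈ 0.9926:
+ * <pre>{@code
+ * odo.setYawScalar(3600.0 / 3627.0);
+ * }</pre>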
+ *
+ * If you find that to get an accurate heading number you need to apply a scalar of more than 1.05, or less than 0.95, your device may be bad. Please reach out to tech@gobilda.com + * @param yawOffset A scalar for the robot's heading. + */ + public void setYawScalar(double yawOffset){ + writeByteArray(Register.YAW_SCALAR,(floatToByteArray((float) yawOffset, ByteOrder.LITTLE_ENDIAN))); + } + + /** + * Send a position that the Pinpoint should use to track your robot relative to. You can use this to + * update the estimated position of your robot with new external sensor data, or to run a robot + * in field coordinates.
+ *
+ * This overrides the current position.
+ *
+ * Using this feature to track your robot's position in field coordinates:
+ * When you start your code, send a Pose2D that describes your robot's starting position on the field.
+ * Say you're on the red alliance, your robot is against the wall and closer to the audience side, + * and the front of your robot is pointing towards the center of the field. + * You could call setPosition with something like -600 mm X, -1200 mm Y, and 90 degrees. The Pinpoint would then always + * keep track of how far away from the center of the field you are.
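+ * <p>A sketch of that call, using the illustrative values above (where {@code odo} is an instance of this driver):
+ * <pre>{@code
+ * odo.setPosition(new Pose2D(DistanceUnit.MM, -600, -1200, AngleUnit.DEGREES, 90));
+ * }</pre>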
+ *
+ * Using this feature to update your position with additional sensors:
+ * Some robots have a secondary way to locate their robot on the field. This is commonly + * Apriltag localization in FTC, but it can also be something like a distance sensor. + * Often these external sensors are absolute (meaning they measure something about the field) + * so their data is very accurate. But they can be slower to read, or you may need to be in a very specific + * position on the field to use them. In that case, spend most of your time relying on the Pinpoint + * to determine your location. Then when you pull a new position from your secondary sensor, + * send a setPosition command with the new position. The Pinpoint will then track your movement + * relative to that new, more accurate position. + * @param pos a Pose2D describing the robot's new position. + */ + public Pose2D setPosition(Pose2D pos){ + writeByteArray(Register.X_POSITION,(floatToByteArray((float) pos.getX(DistanceUnit.MM), ByteOrder.LITTLE_ENDIAN))); + writeByteArray(Register.Y_POSITION,(floatToByteArray((float) pos.getY(DistanceUnit.MM),ByteOrder.LITTLE_ENDIAN))); + writeByteArray(Register.H_ORIENTATION,(floatToByteArray((float) pos.getHeading(AngleUnit.RADIANS),ByteOrder.LITTLE_ENDIAN))); + return pos; + } + + /** + * Checks the deviceID of the Odometry Computer. Should return 1. + * @return 1 if device is functional. + */ + public int getDeviceID(){return readInt(Register.DEVICE_ID);} + + /** + * @return the firmware version of the Odometry Computer + */ + public int getDeviceVersion(){return readInt(Register.DEVICE_VERSION); } + + public float getYawScalar(){return readFloat(Register.YAW_SCALAR); } + + /** + * Device Status stores any faults the Odometry Computer may be experiencing. These faults include: + * @return one of the following states:
+ * NOT_READY - The device is currently powering up and has not initialized yet. RED LED
+ * READY - The device is currently functioning as normal. GREEN LED
+ * CALIBRATING - The device is currently recalibrating the gyro. RED LED
+ * FAULT_NO_PODS_DETECTED - The device does not detect any pods plugged in. PURPLE LED
+ * FAULT_X_POD_NOT_DETECTED - The device does not detect an X pod plugged in. BLUE LED
+ * FAULT_Y_POD_NOT_DETECTED - The device does not detect a Y pod plugged in. ORANGE LED
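+ * <p>For example, an illustrative status check in an OpMode (where {@code odo} is an instance of this driver):
+ * <pre>{@code
+ * if (odo.getDeviceStatus() != DeviceStatus.READY) {
+ *     telemetry.addData("Pinpoint status", odo.getDeviceStatus());
+ * }
+ * }</pre>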
+ */ + public DeviceStatus getDeviceStatus(){return lookupStatus(deviceStatus); } + + /** + * Checks the Odometry Computer's most recent loop time.
+ *
+ * If values less than 500, or more than 1100 are commonly seen here, there may be something wrong with your device. Please reach out to tech@gobilda.com + * @return loop time in microseconds (1/1,000,000 seconds) + */ + public int getLoopTime(){return loopTime; } + + /** + * Checks the Odometry Computer's most recent loop frequency.
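+ * Frequency is simply 1,000,000 divided by the most recent loop time in microseconds, so the healthy 500 to 1100 microsecond loop-time range above corresponds to roughly 900 to 2000 Hz.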
+ *
+ * If values less than 900, or more than 2000 are commonly seen here, there may be something wrong with your device. Please reach out to tech@gobilda.com + * @return Pinpoint Frequency in Hz (loops per second), + */ + public double getFrequency(){ + if (loopTime != 0){ + return 1000000.0/loopTime; + } + else { + return 0; + } + } + + /** + * @return the raw value of the X (forward) encoder in ticks + */ + public int getEncoderX(){return xEncoderValue; } + + /** + * @return the raw value of the Y (strafe) encoder in ticks + */ + public int getEncoderY(){return yEncoderValue; } + + /** + * @return the estimated X (forward) position of the robot in mm + */ + public double getPosX(){return xPosition; } + + /** + * @return the estimated Y (Strafe) position of the robot in mm + */ + public double getPosY(){return yPosition; } + + /** + * @return the estimated H (heading) position of the robot in Radians + */ + public double getHeading(){return hOrientation;} + + /** + * @return the estimated X (forward) velocity of the robot in mm/sec + */ + public double getVelX(){return xVelocity; } + + /** + * @return the estimated Y (strafe) velocity of the robot in mm/sec + */ + public double getVelY(){return yVelocity; } + + /** + * @return the estimated H (heading) velocity of the robot in radians/sec + */ + public double getHeadingVelocity(){return hVelocity; } + + /** + * This uses its own I2C read, avoid calling this every loop. + * @return the user-set offset for the X (forward) pod + */ + public float getXOffset(){return readFloat(Register.X_POD_OFFSET);} + + /** + * This uses its own I2C read, avoid calling this every loop. + * @return the user-set offset for the Y (strafe) pod + */ + public float getYOffset(){return readFloat(Register.Y_POD_OFFSET);} + + /** + * @return a Pose2D containing the estimated position of the robot + */ + public Pose2D getPosition(){ + return new Pose2D(DistanceUnit.MM, + xPosition, + yPosition, + AngleUnit.RADIANS, + hOrientation); + } + + + + /** + * @return a Pose2D containing the estimated velocity of the robot, velocity is unit per second + */ + public Pose2D getVelocity(){ + return new Pose2D(DistanceUnit.MM, + xVelocity, + yVelocity, + AngleUnit.RADIANS, + hVelocity); + } + + + +} \ No newline at end of file diff --git a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/Localizer.java b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/Localizer.java index 8f9687f..64710ac 100644 --- a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/Localizer.java +++ b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/Localizer.java @@ -1,5 +1,7 @@ package org.firstinspires.ftc.teamcode.pedroPathing.localization; +import com.qualcomm.robotcore.hardware.IMU; + import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector; /** @@ -90,5 +92,14 @@ public abstract class Localizer { /** * This resets the IMU of the localizer, if applicable. */ - public abstract void resetIMU(); + public abstract void resetIMU() throws InterruptedException; + + /** + * This is overridden to return the IMU, if there is one. 
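+ * <p>A localizer that owns an IMU can override this to expose it, for example (illustrative sketch):
+ * <pre>{@code
+ * public IMU getIMU() {
+ *     return imu; // the field holding this localizer's IMU instance
+ * }
+ * }</pre>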
+ * + * @return returns the IMU if it exists + */ + public IMU getIMU() { + return null; + } } diff --git a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/Pose.java b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/Pose.java index d5ac720..d784c09 100644 --- a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/Pose.java +++ b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/Pose.java @@ -1,5 +1,7 @@ package org.firstinspires.ftc.teamcode.pedroPathing.localization; +import androidx.annotation.NonNull; + import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.MathFunctions; import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector; @@ -44,7 +46,7 @@ public class Pose { * This creates a new Pose with no inputs and 0 for all values. */ public Pose() { - this(0,0,0); + this(0, 0, 0); } /** @@ -208,4 +210,10 @@ public class Pose { public Pose copy() { return new Pose(getX(), getY(), getHeading()); } + + @NonNull + @Override + public String toString() { + return "(" + getX() + ", " + getY() + ", " + Math.toDegrees(getHeading()) + ")"; + } } diff --git a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/PoseUpdater.java b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/PoseUpdater.java index 94428cd..b718320 100644 --- a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/PoseUpdater.java +++ b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/PoseUpdater.java @@ -5,7 +5,9 @@ import com.qualcomm.robotcore.hardware.HardwareMap; import com.qualcomm.robotcore.hardware.IMU; import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit; +import org.firstinspires.ftc.teamcode.pedroPathing.localization.localizers.ThreeWheelIMULocalizer; import org.firstinspires.ftc.teamcode.pedroPathing.localization.localizers.ThreeWheelLocalizer; +import org.firstinspires.ftc.teamcode.pedroPathing.localization.localizers.TwoWheelLocalizer; import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.MathFunctions; import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector; @@ -58,6 +60,7 @@ public class PoseUpdater { } this.localizer = localizer; + imu = localizer.getIMU(); } /** @@ -259,9 +262,10 @@ public class PoseUpdater { */ public Vector getVelocity() { if (currentVelocity == null) { - currentVelocity = new Vector(); - currentVelocity.setOrthogonalComponents(getPose().getX() - previousPose.getX(), getPose().getY() - previousPose.getY()); - currentVelocity.setMagnitude(MathFunctions.distance(getPose(), previousPose) / ((currentPoseTime - previousPoseTime) / Math.pow(10.0, 9))); +// currentVelocity = new Vector(); +// currentVelocity.setOrthogonalComponents(getPose().getX() - previousPose.getX(), getPose().getY() - previousPose.getY()); +// currentVelocity.setMagnitude(MathFunctions.distance(getPose(), previousPose) / ((currentPoseTime - previousPoseTime) / Math.pow(10.0, 9))); + currentVelocity = localizer.getVelocityVector(); return MathFunctions.copyVector(currentVelocity); } else { return MathFunctions.copyVector(currentVelocity); @@ -298,7 +302,9 @@ public class PoseUpdater { * This resets the heading of the robot to the IMU's heading, using Road Runner's pose reset. 
*/ public void resetHeadingToIMU() { - localizer.setPose(new Pose(getPose().getX(), getPose().getY(), getNormalizedIMUHeading() + startingPose.getHeading())); + if (imu != null) { + localizer.setPose(new Pose(getPose().getX(), getPose().getY(), getNormalizedIMUHeading() + startingPose.getHeading())); + } } /** @@ -307,7 +313,9 @@ public class PoseUpdater { * method. */ public void resetHeadingToIMUWithOffsets() { - setCurrentPoseWithOffset(new Pose(getPose().getX(), getPose().getY(), getNormalizedIMUHeading() + startingPose.getHeading())); + if (imu != null) { + setCurrentPoseWithOffset(new Pose(getPose().getX(), getPose().getY(), getNormalizedIMUHeading() + startingPose.getHeading())); + } } /** @@ -316,7 +324,10 @@ public class PoseUpdater { * @return returns the normalized IMU heading. */ public double getNormalizedIMUHeading() { - return MathFunctions.normalizeAngle(-imu.getRobotYawPitchRollAngles().getYaw(AngleUnit.RADIANS)); + if (imu != null) { + return MathFunctions.normalizeAngle(-imu.getRobotYawPitchRollAngles().getYaw(AngleUnit.RADIANS)); + } + return 0; } /** @@ -340,7 +351,7 @@ public class PoseUpdater { /** * */ - public void resetIMU() { + public void resetIMU() throws InterruptedException { localizer.resetIMU(); } } diff --git a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/localizers/DriveEncoderLocalizer.java b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/localizers/DriveEncoderLocalizer.java new file mode 100644 index 0000000..5f3290a --- /dev/null +++ b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/localizers/DriveEncoderLocalizer.java @@ -0,0 +1,272 @@ +package org.firstinspires.ftc.teamcode.pedroPathing.localization.localizers; + +import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftFrontMotorName; +import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftRearMotorName; +import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightFrontMotorName; +import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightRearMotorName; + +import com.acmerobotics.dashboard.config.Config; +import com.qualcomm.robotcore.hardware.DcMotorEx; +import com.qualcomm.robotcore.hardware.HardwareMap; + +import org.firstinspires.ftc.teamcode.pedroPathing.localization.Encoder; +import org.firstinspires.ftc.teamcode.pedroPathing.localization.Localizer; +import org.firstinspires.ftc.teamcode.pedroPathing.localization.Matrix; +import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose; +import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.MathFunctions; +import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector; +import org.firstinspires.ftc.teamcode.pedroPathing.util.NanoTimer; + +/** + * This is the DriveEncoderLocalizer class. This class extends the Localizer superclass and is a + * localizer that uses the drive encoder set up. 
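+ * <p>A minimal construction sketch (illustrative; it assumes an OpMode's hardwareMap, and the
+ * ticks-to-inches constants below default to 1 and still need to be tuned before the estimates are meaningful):
+ * <pre>{@code
+ * Localizer localizer = new DriveEncoderLocalizer(hardwareMap, new Pose(0, 0, 0));
+ * localizer.update();                 // call once per loop
+ * Pose currentPose = localizer.getPose();
+ * }</pre>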
+ * + * @author Anyi Lin - 10158 Scott's Bots + * @version 1.0, 4/2/2024 + */ +@Config +public class DriveEncoderLocalizer extends Localizer { + private HardwareMap hardwareMap; + private Pose startPose; + private Pose displacementPose; + private Pose currentVelocity; + private Matrix prevRotationMatrix; + private NanoTimer timer; + private long deltaTimeNano; + private Encoder leftFront; + private Encoder rightFront; + private Encoder leftRear; + private Encoder rightRear; + private double totalHeading; + public static double FORWARD_TICKS_TO_INCHES = 1; + public static double STRAFE_TICKS_TO_INCHES = 1; + public static double TURN_TICKS_TO_RADIANS = 1; + public static double ROBOT_WIDTH = 1; + public static double ROBOT_LENGTH = 1; + + /** + * This creates a new DriveEncoderLocalizer from a HardwareMap, with a starting Pose at (0,0) + * facing 0 heading. + * + * @param map the HardwareMap + */ + public DriveEncoderLocalizer(HardwareMap map) { + this(map, new Pose()); + } + + /** + * This creates a new DriveEncoderLocalizer from a HardwareMap and a Pose, with the Pose + * specifying the starting pose of the localizer. + * + * @param map the HardwareMap + * @param setStartPose the Pose to start from + */ + public DriveEncoderLocalizer(HardwareMap map, Pose setStartPose) { + hardwareMap = map; + + leftFront = new Encoder(hardwareMap.get(DcMotorEx.class, leftFrontMotorName)); + leftRear = new Encoder(hardwareMap.get(DcMotorEx.class, leftRearMotorName)); + rightRear = new Encoder(hardwareMap.get(DcMotorEx.class, rightRearMotorName)); + rightFront = new Encoder(hardwareMap.get(DcMotorEx.class, rightFrontMotorName)); + + // TODO: reverse any encoders necessary + leftFront.setDirection(Encoder.REVERSE); + leftRear.setDirection(Encoder.REVERSE); + rightFront.setDirection(Encoder.FORWARD); + rightRear.setDirection(Encoder.FORWARD); + + setStartPose(setStartPose); + timer = new NanoTimer(); + deltaTimeNano = 1; + displacementPose = new Pose(); + currentVelocity = new Pose(); + } + + /** + * This returns the current pose estimate. + * + * @return returns the current pose estimate as a Pose + */ + @Override + public Pose getPose() { + return MathFunctions.addPoses(startPose, displacementPose); + } + + /** + * This returns the current velocity estimate. + * + * @return returns the current velocity estimate as a Pose + */ + @Override + public Pose getVelocity() { + return currentVelocity.copy(); + } + + /** + * This returns the current velocity estimate. + * + * @return returns the current velocity estimate as a Vector + */ + @Override + public Vector getVelocityVector() { + return currentVelocity.getVector(); + } + + /** + * This sets the start pose. Changing the start pose should move the robot as if all its + * previous movements were displacing it from its new start pose. + * + * @param setStart the new start pose + */ + @Override + public void setStartPose(Pose setStart) { + startPose = setStart; + } + + /** + * This sets the Matrix that contains the previous pose's heading rotation. + * + * @param heading the rotation of the Matrix + */ + public void setPrevRotationMatrix(double heading) { + prevRotationMatrix = new Matrix(3,3); + prevRotationMatrix.set(0, 0, Math.cos(heading)); + prevRotationMatrix.set(0, 1, -Math.sin(heading)); + prevRotationMatrix.set(1, 0, Math.sin(heading)); + prevRotationMatrix.set(1, 1, Math.cos(heading)); + prevRotationMatrix.set(2, 2, 1.0); + } + + /** + * This sets the current pose estimate. 
Changing this should just change the robot's current + * pose estimate, not anything to do with the start pose. + * + * @param setPose the new current pose estimate + */ + @Override + public void setPose(Pose setPose) { + displacementPose = MathFunctions.subtractPoses(setPose, startPose); + resetEncoders(); + } + + /** + * This updates the elapsed time timer that keeps track of time between updates, as well as the + * change position of the Encoders. Then, the robot's global change in position is calculated + * using the pose exponential method. + */ + @Override + public void update() { + deltaTimeNano = timer.getElapsedTime(); + timer.resetTimer(); + + updateEncoders(); + Matrix robotDeltas = getRobotDeltas(); + Matrix globalDeltas; + setPrevRotationMatrix(getPose().getHeading()); + + Matrix transformation = new Matrix(3,3); + if (Math.abs(robotDeltas.get(2, 0)) < 0.001) { + transformation.set(0, 0, 1.0 - (Math.pow(robotDeltas.get(2, 0), 2) / 6.0)); + transformation.set(0, 1, -robotDeltas.get(2, 0) / 2.0); + transformation.set(1, 0, robotDeltas.get(2, 0) / 2.0); + transformation.set(1, 1, 1.0 - (Math.pow(robotDeltas.get(2, 0), 2) / 6.0)); + transformation.set(2, 2, 1.0); + } else { + transformation.set(0, 0, Math.sin(robotDeltas.get(2, 0)) / robotDeltas.get(2, 0)); + transformation.set(0, 1, (Math.cos(robotDeltas.get(2, 0)) - 1.0) / robotDeltas.get(2, 0)); + transformation.set(1, 0, (1.0 - Math.cos(robotDeltas.get(2, 0))) / robotDeltas.get(2, 0)); + transformation.set(1, 1, Math.sin(robotDeltas.get(2, 0)) / robotDeltas.get(2, 0)); + transformation.set(2, 2, 1.0); + } + + globalDeltas = Matrix.multiply(Matrix.multiply(prevRotationMatrix, transformation), robotDeltas); + + displacementPose.add(new Pose(globalDeltas.get(0, 0), globalDeltas.get(1, 0), globalDeltas.get(2, 0))); + currentVelocity = new Pose(globalDeltas.get(0, 0) / (deltaTimeNano / Math.pow(10.0, 9)), globalDeltas.get(1, 0) / (deltaTimeNano / Math.pow(10.0, 9)), globalDeltas.get(2, 0) / (deltaTimeNano / Math.pow(10.0, 9))); + + totalHeading += globalDeltas.get(2, 0); + } + + /** + * This updates the Encoders. + */ + public void updateEncoders() { + leftFront.update(); + rightFront.update(); + leftRear.update(); + rightRear.update(); + } + + /** + * This resets the Encoders. + */ + public void resetEncoders() { + leftFront.reset(); + rightFront.reset(); + leftRear.reset(); + rightRear.reset(); + } + + /** + * This calculates the change in position from the perspective of the robot using information + * from the Encoders. + * + * @return returns a Matrix containing the robot relative movement. + */ + public Matrix getRobotDeltas() { + Matrix returnMatrix = new Matrix(3,1); + // x/forward movement + returnMatrix.set(0,0, FORWARD_TICKS_TO_INCHES * (leftFront.getDeltaPosition() + rightFront.getDeltaPosition() + leftRear.getDeltaPosition() + rightRear.getDeltaPosition())); + //y/strafe movement + returnMatrix.set(1,0, STRAFE_TICKS_TO_INCHES * (-leftFront.getDeltaPosition() + rightFront.getDeltaPosition() + leftRear.getDeltaPosition() - rightRear.getDeltaPosition())); + // theta/turning + returnMatrix.set(2,0, TURN_TICKS_TO_RADIANS * ((-leftFront.getDeltaPosition() + rightFront.getDeltaPosition() - leftRear.getDeltaPosition() + rightRear.getDeltaPosition()) / (ROBOT_WIDTH + ROBOT_LENGTH))); + return returnMatrix; + } + + /** + * This returns how far the robot has turned in radians, in a number not clamped between 0 and + * 2 * pi radians. This is used for some tuning things and nothing actually within the following. 
+ * + * @return returns how far the robot has turned in total, in radians. + */ + public double getTotalHeading() { + return totalHeading; + } + + /** + * This returns the multiplier applied to forward movement measurement to convert from encoder + * ticks to inches. This is found empirically through a tuner. + * + * @return returns the forward ticks to inches multiplier + */ + public double getForwardMultiplier() { + return FORWARD_TICKS_TO_INCHES; + } + + /** + * This returns the multiplier applied to lateral/strafe movement measurement to convert from + * encoder ticks to inches. This is found empirically through a tuner. + * + * @return returns the lateral/strafe ticks to inches multiplier + */ + public double getLateralMultiplier() { + return STRAFE_TICKS_TO_INCHES; + } + + /** + * This returns the multiplier applied to turning movement measurement to convert from encoder + * ticks to radians. This is found empirically through a tuner. + * + * @return returns the turning ticks to radians multiplier + */ + public double getTurningMultiplier() { + return TURN_TICKS_TO_RADIANS; + } + + /** + * This does nothing since this localizer does not use the IMU. + */ + public void resetIMU() { + } +} diff --git a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/localizers/OTOSLocalizer.java b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/localizers/OTOSLocalizer.java new file mode 100644 index 0000000..0f309a5 --- /dev/null +++ b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/localizers/OTOSLocalizer.java @@ -0,0 +1,227 @@ +package org.firstinspires.ftc.teamcode.pedroPathing.localization.localizers; + +import com.qualcomm.hardware.sparkfun.SparkFunOTOS; +import com.qualcomm.robotcore.hardware.HardwareMap; + +import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit; +import org.firstinspires.ftc.robotcore.external.navigation.DistanceUnit; +import org.firstinspires.ftc.teamcode.pedroPathing.localization.Localizer; +import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose; +import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.MathFunctions; +import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector; + +/** + * This is the OTOSLocalizer class. This class extends the Localizer superclass and is a + * localizer that uses the SparkFun OTOS. The diagram below, which is modified from + * Road Runner, shows a typical set up. + * + * The view is from the top of the robot looking downwards. + * + * left on robot is the y positive direction + * + * forward on robot is the x positive direction + * + * forward (x positive) + * △ + * | + * | + * /--------------\ + * | | + * | | + * | || || | + * left (y positive) <--- | || || | + * | ____ | + * | ---- | + * \--------------/ + * + * @author Anyi Lin - 10158 Scott's Bots + * @version 1.0, 7/20/2024 + */ +public class OTOSLocalizer extends Localizer { + private HardwareMap hardwareMap; + private Pose startPose; + private SparkFunOTOS otos; + private SparkFunOTOS.Pose2D otosPose; + private SparkFunOTOS.Pose2D otosVel; + private SparkFunOTOS.Pose2D otosAcc; + private double previousHeading; + private double totalHeading; + + /** + * This creates a new OTOSLocalizer from a HardwareMap, with a starting Pose at (0,0) + * facing 0 heading. 
+ * + * @param map the HardwareMap + */ + public OTOSLocalizer(HardwareMap map) { + this(map, new Pose()); + } + + /** + * This creates a new OTOSLocalizer from a HardwareMap and a Pose, with the Pose + * specifying the starting pose of the localizer. + * + * @param map the HardwareMap + * @param setStartPose the Pose to start from + */ + public OTOSLocalizer(HardwareMap map, Pose setStartPose) { + hardwareMap = map; + + /* + TODO: If you want to use the "SparkFunOTOSCorrected" version of OTOS, then replace the + 'SparkFunOTOS.class' below with 'SparkFunOTOSCorrected.class' and set the OTOS as a + "SparkFunOTOS Corrected" in your robot config + */ + // TODO: replace this with your OTOS port + otos = hardwareMap.get(SparkFunOTOS.class, "sensor_otos"); + + otos.setLinearUnit(DistanceUnit.INCH); + otos.setAngularUnit(AngleUnit.RADIANS); + + // TODO: replace this with your OTOS offset from the center of the robot + // For the OTOS, left/right is the y axis and forward/backward is the x axis, with left being + // positive y and forward being positive x. PI/2 radians is facing forward, and clockwise + // rotation is negative rotation. + otos.setOffset(new SparkFunOTOS.Pose2D(0,0,Math.PI / 2)); + + // TODO: replace these with your tuned multipliers + otos.setLinearScalar(1.0); + otos.setAngularScalar(1.0); + + otos.calibrateImu(); + otos.resetTracking(); + + setStartPose(setStartPose); + otosPose = new SparkFunOTOS.Pose2D(); + otosVel = new SparkFunOTOS.Pose2D(); + otosAcc = new SparkFunOTOS.Pose2D(); + totalHeading = 0; + previousHeading = startPose.getHeading(); + + resetOTOS(); + } + + /** + * This returns the current pose estimate. + * + * @return returns the current pose estimate as a Pose + */ + @Override + public Pose getPose() { + Pose pose = new Pose(otosPose.x, otosPose.y, otosPose.h); + + Vector vec = pose.getVector(); + vec.rotateVector(startPose.getHeading()); + + return MathFunctions.addPoses(startPose, new Pose(vec.getXComponent(), vec.getYComponent(), pose.getHeading())); + } + + /** + * This returns the current velocity estimate. + * + * @return returns the current velocity estimate as a Pose + */ + @Override + public Pose getVelocity() { + return new Pose(otosVel.x, otosVel.y, otosVel.h); + } + + /** + * This returns the current velocity estimate. + * + * @return returns the current velocity estimate as a Vector + */ + @Override + public Vector getVelocityVector() { + return getVelocity().getVector(); + } + + /** + * This sets the start pose. Changing the start pose should move the robot as if all its + * previous movements were displacing it from its new start pose. + * + * @param setStart the new start pose + */ + @Override + public void setStartPose(Pose setStart) { + startPose = setStart; + } + + /** + * This sets the current pose estimate. Changing this should just change the robot's current + * pose estimate, not anything to do with the start pose. + * + * @param setPose the new current pose estimate + */ + @Override + public void setPose(Pose setPose) { + resetOTOS(); + Pose setOTOSPose = MathFunctions.subtractPoses(setPose, startPose); + otos.setPosition(new SparkFunOTOS.Pose2D(setOTOSPose.getX(), setOTOSPose.getY(), setOTOSPose.getHeading())); + } + + /** + * This updates the total heading of the robot. The OTOS handles all other updates itself. 
+ */ + @Override + public void update() { + otos.getPosVelAcc(otosPose,otosVel,otosAcc); + totalHeading += MathFunctions.getSmallestAngleDifference(otosPose.h, previousHeading); + previousHeading = otosPose.h; + } + + /** + * This resets the OTOS. + */ + public void resetOTOS() { + otos.resetTracking(); + } + + /** + * This returns how far the robot has turned in radians, in a number not clamped between 0 and + * 2 * pi radians. This is used for some tuning things and nothing actually within the following. + * + * @return returns how far the robot has turned in total, in radians. + */ + public double getTotalHeading() { + return totalHeading; + } + + /** + * This returns the multiplier applied to forward movement measurement to convert from OTOS + * ticks to inches. For the OTOS, this value is the same as the lateral multiplier. + * This is found empirically through a tuner. + * + * @return returns the forward ticks to inches multiplier + */ + public double getForwardMultiplier() { + return otos.getLinearScalar(); + } + + /** + * This returns the multiplier applied to lateral/strafe movement measurement to convert from + * OTOS ticks to inches. For the OTOS, this value is the same as the forward multiplier. + * This is found empirically through a tuner. + * + * @return returns the lateral/strafe ticks to inches multiplier + */ + public double getLateralMultiplier() { + return otos.getLinearScalar(); + } + + /** + * This returns the multiplier applied to turning movement measurement to convert from OTOS ticks + * to radians. This is found empirically through a tuner. + * + * @return returns the turning ticks to radians multiplier + */ + public double getTurningMultiplier() { + return otos.getAngularScalar(); + } + + /** + * This does nothing since this localizer does not use the IMU. + */ + public void resetIMU() { + } +} diff --git a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/localizers/PinpointLocalizer.java b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/localizers/PinpointLocalizer.java new file mode 100644 index 0000000..d3d3fda --- /dev/null +++ b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/localizers/PinpointLocalizer.java @@ -0,0 +1,245 @@ +package org.firstinspires.ftc.teamcode.pedroPathing.localization.localizers; + + +import com.qualcomm.robotcore.hardware.HardwareMap; + +import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit; +import org.firstinspires.ftc.robotcore.external.navigation.DistanceUnit; +import org.firstinspires.ftc.robotcore.external.navigation.Pose2D; +import org.firstinspires.ftc.teamcode.pedroPathing.localization.GoBildaPinpointDriver; +import org.firstinspires.ftc.teamcode.pedroPathing.localization.Localizer; +import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose; +import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.MathFunctions; +import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector; +import org.firstinspires.ftc.teamcode.pedroPathing.util.NanoTimer; +import org.opencv.core.Mat; + +/** + * This is the Pinpoint class. This class extends the Localizer superclass and is a + * localizer that uses the two wheel odometry set up with the IMU to have more accurate heading + * readings. The diagram below, which is modified from Road Runner, shows a typical set up. + * + * The view is from the top of the robot looking downwards. 
+ * + * left on robot is the y positive direction + * + * forward on robot is the x positive direction + * + * /--------------\ + * | ____ | + * | ---- | + * | || | + * | || | ----> left (y positive) + * | | + * | | + * \--------------/ + * | + * | + * V + * forward (x positive) + * With the pinpoint your readings will be used in mm + * to use inches ensure to divide your mm value by 25.4 + * @author Logan Nash + * @author Havish Sripada 12808 - RevAmped Robotics + * @author Ethan Doak - Gobilda + * @version 1.0, 10/2/2024 + */ +public class PinpointLocalizer extends Localizer { + private HardwareMap hardwareMap; + private GoBildaPinpointDriver odo; + private double previousHeading; + private double totalHeading; + private Pose startPose; + private long deltaTimeNano; + private NanoTimer timer; + private Pose currentVelocity; + private Pose previousPinpointPose; + + /** + * This creates a new PinpointLocalizer from a HardwareMap, with a starting Pose at (0,0) + * facing 0 heading. + * + * @param map the HardwareMap + */ + public PinpointLocalizer(HardwareMap map){ this(map, new Pose());} + + /** + * This creates a new PinpointLocalizer from a HardwareMap and a Pose, with the Pose + * specifying the starting pose of the localizer. + * + * @param map the HardwareMap + * @param setStartPose the Pose to start from + */ + public PinpointLocalizer(HardwareMap map, Pose setStartPose){ + hardwareMap = map; + + odo = hardwareMap.get(GoBildaPinpointDriver.class,"pinpoint"); + + //The default units are inches, but you can swap the units if you wish. + //If you have already tuned the TwoWheelLocalizer, you can simply use the forwardEncoderPose's y value and strafeEncoderPose's x values. + setOffsets(-2.815, 0.125, DistanceUnit.INCH); //these are tuned for 3110-0002-0001 Product Insight #1 + + //TODO: Tune urself if needed +// odo.setYawScalar(1.0); + + odo.setEncoderResolution(GoBildaPinpointDriver.GoBildaOdometryPods.goBILDA_4_BAR_POD); + //odo.setEncoderResolution(13.26291192); + + odo.setEncoderDirections(GoBildaPinpointDriver.EncoderDirection.REVERSED, GoBildaPinpointDriver.EncoderDirection.FORWARD); + + resetPinpoint(); + + setStartPose(setStartPose); + totalHeading = 0; + timer = new NanoTimer(); + previousPinpointPose = new Pose(); + currentVelocity = new Pose(); + deltaTimeNano = 1; + previousHeading = setStartPose.getHeading(); + } + + /** + * This returns the current pose estimate. + * + * @return returns the current pose estimate as a Pose + */ + @Override + public Pose getPose() { + return MathFunctions.addPoses(startPose, MathFunctions.rotatePose(previousPinpointPose, startPose.getHeading(), false)); + } + + /** + * This returns the current velocity estimate. + * + * @return returns the current velocity estimate as a Pose + */ + @Override + public Pose getVelocity() { + return currentVelocity.copy(); + } + + /** + * This returns the current velocity estimate. + * + * @return returns the current velocity estimate as a Vector + */ + @Override + public Vector getVelocityVector() { + return currentVelocity.getVector(); + } + + /** + * This sets the start pose. Since nobody should be using this after the robot has begun moving, + * and due to issues with the PinpointLocalizer, this is functionally the same as setPose(Pose). + * + * @param setStart the new start pose + */ + @Override + public void setStartPose(Pose setStart) { + this.startPose = setStart; + } + + /** + * This sets the current pose estimate. 
Changing this should just change the robot's current + * pose estimate, not anything to do with the start pose. + * + * @param setPose the new current pose estimate + */ + @Override + public void setPose(Pose setPose) { + Pose setNewPose = MathFunctions.subtractPoses(setPose, startPose); + odo.setPosition(new Pose2D(DistanceUnit.INCH, setNewPose.getX(), setNewPose.getY(), AngleUnit.RADIANS, setNewPose.getHeading())); + } + + /** + * This updates the total heading of the robot. The Pinpoint handles all other updates itself. + */ + @Override + public void update() { + deltaTimeNano = timer.getElapsedTime(); + timer.resetTimer(); + odo.update(); + Pose2D pinpointPose = odo.getPosition(); + Pose currentPinpointPose = new Pose(pinpointPose.getX(DistanceUnit.INCH), pinpointPose.getY(DistanceUnit.INCH), pinpointPose.getHeading(AngleUnit.RADIANS)); + totalHeading += MathFunctions.getSmallestAngleDifference(currentPinpointPose.getHeading(), previousHeading); + previousHeading = currentPinpointPose.getHeading(); + Pose deltaPose = MathFunctions.subtractPoses(currentPinpointPose, previousPinpointPose); + currentVelocity = new Pose(deltaPose.getX() / (deltaTimeNano / Math.pow(10.0, 9)), deltaPose.getY() / (deltaTimeNano / Math.pow(10.0, 9)), deltaPose.getHeading() / (deltaTimeNano / Math.pow(10.0, 9))); + previousPinpointPose = currentPinpointPose; + } + + /** + * This returns how far the robot has turned in radians, in a number not clamped between 0 and + * 2 * pi radians. This is used for some tuning things and nothing actually within the following. + * + * @return returns how far the robot has turned in total, in radians. + */ + @Override + public double getTotalHeading() { + return totalHeading; + } + + /** + * This returns the Y encoder value as none of the odometry tuners are required for this localizer + * @return returns the Y encoder value + */ + @Override + public double getForwardMultiplier() { + return odo.getEncoderY(); + } + + /** + * This returns the X encoder value as none of the odometry tuners are required for this localizer + * @return returns the X encoder value + */ + @Override + public double getLateralMultiplier() { + return odo.getEncoderX(); + } + + /** + * This returns either the factory tuned yaw scalar or the yaw scalar tuned by yourself. + * @return returns the yaw scalar + */ + @Override + public double getTurningMultiplier() { + return odo.getYawScalar(); + } + + /** + * This sets the offsets and converts inches to millimeters + * @param xOffset How far to the side from the center of the robot is the x-pod? Use positive values if it's to the left and negative if it's to the right. + * @param yOffset How far forward from the center of the robot is the y-pod? Use positive values if it's forward and negative if it's to the back. + * @param unit The units that the measurements are given in + */ + private void setOffsets(double xOffset, double yOffset, DistanceUnit unit) { + odo.setOffsets(unit.toMm(xOffset), unit.toMm(yOffset)); + } + + /** + * This resets the IMU. Does not change heading estimation. + */ + @Override + public void resetIMU() throws InterruptedException { + odo.recalibrateIMU(); + + try { + Thread.sleep(300); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + /** + * This resets the pinpoint. 
+ */ + private void resetPinpoint() { + odo.resetPosAndIMU(); + + try { + Thread.sleep(300); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } +} diff --git a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/localizers/RRToPedroThreeWheelLocalizer.java b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/localizers/RRToPedroThreeWheelLocalizer.java new file mode 100644 index 0000000..15275f0 --- /dev/null +++ b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/localizers/RRToPedroThreeWheelLocalizer.java @@ -0,0 +1,159 @@ +//package org.firstinspires.ftc.teamcode.pedroPathing.localization; +// +//import com.acmerobotics.roadrunner.geometry.Pose2d; +//import com.qualcomm.robotcore.hardware.HardwareMap; +// +//import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.MathFunctions; +//import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector; +// +//import java.util.ArrayList; +//import java.util.List; +// +///** +// * This is the RRToPedroThreeWheelLocalizer class. This class extends the Localizer superclass and +// * is intended to adapt the old Road Runner three wheel odometry localizer to the new Pedro Pathing +// * localizer system. +// * +// * @author Anyi Lin - 10158 Scott's Bots +// * @version 1.0, 5/9/2024 +// */ +//public class RRToPedroThreeWheelLocalizer extends Localizer { +// private RoadRunnerThreeWheelLocalizer localizer; +// private double totalHeading; +// private Pose startPose; +// private Pose previousPose; +// +// /** +// * This creates a new RRToPedroThreeWheelLocalizer from a HardwareMap. This adapts the previously +// * used Road Runner localization system to the new Pedro Pathing localization system. +// * +// * @param hardwareMap the HardwareMap +// */ +// public RRToPedroThreeWheelLocalizer(HardwareMap hardwareMap) { +// List lastTrackingEncPositions = new ArrayList<>(); +// List lastTrackingEncVels = new ArrayList<>(); +// +// localizer = new RoadRunnerThreeWheelLocalizer(hardwareMap, lastTrackingEncPositions, lastTrackingEncVels); +// +// startPose = new Pose(); +// previousPose = new Pose(); +// } +// +// /** +// * This returns the current pose estimate as a Pose. +// * +// * @return returns the current pose estimate +// */ +// @Override +// public Pose getPose() { +// Pose2d pose = localizer.getPoseEstimate(); +// return new Pose(pose.getX(), pose.getY(), pose.getHeading()); +// } +// +// /** +// * This returns the current velocity estimate as a Pose. +// * +// * @return returns the current velocity estimate +// */ +// @Override +// public Pose getVelocity() { +// Pose2d pose = localizer.getPoseVelocity(); +// return new Pose(pose.getX(), pose.getY(), pose.getHeading()); +// } +// +// /** +// * This returns the current velocity estimate as a Vector. +// * +// * @return returns the current velocity estimate +// */ +// @Override +// public Vector getVelocityVector() { +// Pose2d pose = localizer.getPoseVelocity(); +// Vector returnVector = new Vector(); +// returnVector.setOrthogonalComponents(pose.getX(), pose.getY()); +// return returnVector; +// } +// +// /** +// * This sets the start pose. Any movement of the robot is treated as a displacement from the +// * start pose, so moving the start pose will move the current pose estimate the same amount. 
+// * +// * @param setStart the new start pose +// */ +// @Override +// public void setStartPose(Pose setStart) { +// Pose oldStart = startPose; +// startPose = setStart; +// Pose startDiff = MathFunctions.subtractPoses(startPose, oldStart); +// localizer.setPoseEstimate(new Pose2d(getPose().getX() + startDiff.getX(), getPose().getY() + startDiff.getY(), getPose().getHeading() + startDiff.getHeading())); +// } +// +// /** +// * This sets the current pose estimate. This has no effect on the start pose. +// * +// * @param setPose the new current pose estimate +// */ +// @Override +// public void setPose(Pose setPose) { +// localizer.setPoseEstimate(new Pose2d(setPose.getX(), setPose.getY(), setPose.getHeading())); +// } +// +// /** +// * This updates the total heading and previous pose estimate. Everything else is handled by the +// * Road Runner localizer on its own, but updating this tells you how far the robot has really +// * turned. +// */ +// @Override +// public void update() { +// totalHeading += MathFunctions.getTurnDirection(previousPose.getHeading(), getPose().getHeading()) * MathFunctions.getSmallestAngleDifference(previousPose.getHeading(), getPose().getHeading()); +// previousPose = getPose(); +// } +// +// /** +// * This returns how far the robot has actually turned. +// * +// * @return returns the total angle turned, in degrees. +// */ +// @Override +// public double getTotalHeading() { +// return totalHeading; +// } +// +// /** +// * This returns the forward multiplier of the Road Runner localizer, which converts from ticks +// * to inches. You can actually use the tuners in Pedro Pathing to find the value that everything +// * multiplied together should be. If you do use that, then do be aware that the value returned is +// * the product of the Road Runner ticks to inches and the x multiplier. +// * +// * @return returns the forward multiplier +// */ +// @Override +// public double getForwardMultiplier() { +// return RoadRunnerThreeWheelLocalizer.encoderTicksToInches(1) * RoadRunnerThreeWheelLocalizer.X_MULTIPLIER; +// } +// +// /** +// * This returns the lateral multiplier of the Road Runner localizer, which converts from ticks +// * to inches. You can actually use the tuners in Pedro Pathing to find the value that everything +// * multiplied together should be. If you do use that, then do be aware that the value returned is +// * the product of the Road Runner ticks to inches and the y multiplier. +// * +// * @return returns the lateral multiplier +// */ +// @Override +// public double getLateralMultiplier() { +// return RoadRunnerThreeWheelLocalizer.encoderTicksToInches(1) * RoadRunnerThreeWheelLocalizer.Y_MULTIPLIER; +// } +// +// /** +// * This returns the turning multiplier of the Road Runner localizer, which doesn't actually exist. +// * There really isn't a point in tuning the turning for the Road Runner localizer. This will +// * actually just return the average of the two other multipliers. 
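+//     * For example, forward and lateral multipliers of 0.001 and 0.0012 would give 0.0011 here.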
+// * +// * @return returns the turning multiplier +// */ +// @Override +// public double getTurningMultiplier() { +// return (getForwardMultiplier() + getLateralMultiplier()) / 2; +// } +//} diff --git a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/localizers/RoadRunnerEncoder.java b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/localizers/RoadRunnerEncoder.java new file mode 100644 index 0000000..d743b49 --- /dev/null +++ b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/localizers/RoadRunnerEncoder.java @@ -0,0 +1,132 @@ +//package org.firstinspires.ftc.teamcode.pedroPathing.localization; +// +//import com.acmerobotics.roadrunner.util.NanoClock; +//import com.qualcomm.robotcore.hardware.DcMotorEx; +//import com.qualcomm.robotcore.hardware.DcMotorSimple; +// +///** +// * This class is adapted from the Road Runner Encoder class. Later, this will be replaced with a +// * custom encoder class. According to Road Runner, this wraps a motor instance to provide corrected +// * velocity counts and allow reversing independently of the corresponding slot's motor direction. +// * +// * I'm fairly sure I didn't make any changes to this class, just copied it so I wouldn't have to have +// * import statements, so I'm not crediting myself as an author for this. +// * +// * @author Road Runner dev team +// * @version 1.0, 5/9/2024 +// */ +//public class RoadRunnerEncoder { +// private final static int CPS_STEP = 0x10000; +// +// private static double inverseOverflow(double input, double estimate) { +// // convert to uint16 +// int real = (int) input & 0xffff; +// // initial, modulo-based correction: it can recover the remainder of 5 of the upper 16 bits +// // because the velocity is always a multiple of 20 cps due to Expansion Hub's 50ms measurement window +// real += ((real % 20) / 4) * CPS_STEP; +// // estimate-based correction: it finds the nearest multiple of 5 to correct the upper bits by +// real += Math.round((estimate - real) / (5 * CPS_STEP)) * 5 * CPS_STEP; +// return real; +// } +// +// public enum Direction { +// FORWARD(1), +// REVERSE(-1); +// +// private int multiplier; +// +// Direction(int multiplier) { +// this.multiplier = multiplier; +// } +// +// public int getMultiplier() { +// return multiplier; +// } +// } +// +// private DcMotorEx motor; +// private NanoClock clock; +// +// private Direction direction; +// +// private int lastPosition; +// private int velocityEstimateIdx; +// private double[] velocityEstimates; +// private double lastUpdateTime; +// +// public RoadRunnerEncoder(DcMotorEx motor, NanoClock clock) { +// this.motor = motor; +// this.clock = clock; +// +// this.direction = Direction.FORWARD; +// +// this.lastPosition = 0; +// this.velocityEstimates = new double[3]; +// this.lastUpdateTime = clock.seconds(); +// } +// +// public RoadRunnerEncoder(DcMotorEx motor) { +// this(motor, NanoClock.system()); +// } +// +// public Direction getDirection() { +// return direction; +// } +// +// private int getMultiplier() { +// return getDirection().getMultiplier() * (motor.getDirection() == DcMotorSimple.Direction.FORWARD ? 
1 : -1); +// } +// +// /** +// * Allows you to set the direction of the counts and velocity without modifying the motor's direction state +// * @param direction either reverse or forward depending on if encoder counts should be negated +// */ +// public void setDirection(Direction direction) { +// this.direction = direction; +// } +// +// /** +// * Gets the position from the underlying motor and adjusts for the set direction. +// * Additionally, this method updates the velocity estimates used for compensated velocity +// * +// * @return encoder position +// */ +// public int getCurrentPosition() { +// int multiplier = getMultiplier(); +// int currentPosition = motor.getCurrentPosition() * multiplier; +// if (currentPosition != lastPosition) { +// double currentTime = clock.seconds(); +// double dt = currentTime - lastUpdateTime; +// velocityEstimates[velocityEstimateIdx] = (currentPosition - lastPosition) / dt; +// velocityEstimateIdx = (velocityEstimateIdx + 1) % 3; +// lastPosition = currentPosition; +// lastUpdateTime = currentTime; +// } +// return currentPosition; +// } +// +// /** +// * Gets the velocity directly from the underlying motor and compensates for the direction +// * See {@link #getCorrectedVelocity} for high (>2^15) counts per second velocities (such as on REV Through Bore) +// * +// * @return raw velocity +// */ +// public double getRawVelocity() { +// int multiplier = getMultiplier(); +// return motor.getVelocity() * multiplier; +// } +// +// /** +// * Uses velocity estimates gathered in {@link #getCurrentPosition} to estimate the upper bits of velocity +// * that are lost in overflow due to velocity being transmitted as 16 bits. +// * CAVEAT: must regularly call {@link #getCurrentPosition} for the compensation to work correctly. +// * +// * @return corrected velocity +// */ +// public double getCorrectedVelocity() { +// double median = velocityEstimates[0] > velocityEstimates[1] +// ? Math.max(velocityEstimates[1], Math.min(velocityEstimates[0], velocityEstimates[2])) +// : Math.max(velocityEstimates[0], Math.min(velocityEstimates[1], velocityEstimates[2])); +// return inverseOverflow(getRawVelocity(), median); +// } +//} diff --git a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/localizers/RoadRunnerThreeWheelLocalizer.java b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/localizers/RoadRunnerThreeWheelLocalizer.java new file mode 100644 index 0000000..2ee75a9 --- /dev/null +++ b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/localizers/RoadRunnerThreeWheelLocalizer.java @@ -0,0 +1,123 @@ +//package org.firstinspires.ftc.teamcode.pedroPathing.localization; +// +//import androidx.annotation.NonNull; +// +//import com.acmerobotics.dashboard.config.Config; +//import com.acmerobotics.roadrunner.geometry.Pose2d; +//import com.acmerobotics.roadrunner.localization.ThreeTrackingWheelLocalizer; +//import com.qualcomm.robotcore.hardware.DcMotorEx; +//import com.qualcomm.robotcore.hardware.HardwareMap; +// +//import java.util.Arrays; +//import java.util.List; +// +///* +// * Sample tracking wheel localizer implementation assuming the standard configuration: +// * +// * left on robot is y pos +// * +// * front of robot is x pos +// * +// * /--------------\ +// * | ____ | +// * | ---- | +// * | || || | +// * | || || | +// * | | +// * | | +// * \--------------/ +// * +// */ +// +///** +// * This class is adapted from the Road Runner StandardTrackingWheelLocalizer class. 
Later, this will +// * be replaced with a custom localizer. I made some minor changes, so I'm crediting myself as an +// * 'author' of sorts, but really this is pretty much Road Runner's code, just moved to be local to +// * Pedro Pathing to avoid having imports. +// * +// * @author Road Runner dev team +// * @author Anyi Lin - 10158 Scott's Bots +// * @version 1.0, 5/9/2024 +// */ +//@Config +//public class RoadRunnerThreeWheelLocalizer extends ThreeTrackingWheelLocalizer { +// public static double TICKS_PER_REV = 8192; +// public static double WHEEL_RADIUS = 1.37795; // in +// public static double GEAR_RATIO = 1; // output (wheel) speed / input (encoder) speed +// +// public static double X_MULTIPLIER = 0.5008239963; +// public static double Y_MULTIPLIER = 0.5018874659; +// +// public static double leftX = -18.5/25.4 - 0.1, leftY = 164.4/25.4, rightX = -18.4/25.4 - 0.1, rightY = -159.6/25.4, strafeX = -107.9/25.4+0.25, strafeY = -1.1/25.4-0.23; +// +// private RoadRunnerEncoder leftEncoder, rightEncoder, strafeEncoder; +// +// private List lastEncPositions, lastEncVels; +// +// public RoadRunnerThreeWheelLocalizer(HardwareMap hardwareMap, List lastTrackingEncPositions, List lastTrackingEncVels) { +// super(Arrays.asList( +// new Pose2d(leftX, leftY, 0), // left +// new Pose2d(rightX, rightY, 0), // right +// new Pose2d(strafeX, strafeY, Math.toRadians(90)) // strafe +// )); +// +// lastEncPositions = lastTrackingEncPositions; +// lastEncVels = lastTrackingEncVels; +// +// // TODO: redo the configs here +// leftEncoder = new RoadRunnerEncoder(hardwareMap.get(DcMotorEx.class, "leftRear")); +// rightEncoder = new RoadRunnerEncoder(hardwareMap.get(DcMotorEx.class, "rightFront")); +// strafeEncoder = new RoadRunnerEncoder(hardwareMap.get(DcMotorEx.class, "strafeEncoder")); +// +// // TODO: reverse any encoders using Encoder.setDirection(Encoder.Direction.REVERSE) +// leftEncoder.setDirection(RoadRunnerEncoder.Direction.REVERSE); +// rightEncoder.setDirection(RoadRunnerEncoder.Direction.REVERSE); +// strafeEncoder.setDirection(RoadRunnerEncoder.Direction.FORWARD); +// } +// +// public void resetHeading(double heading) { +// setPoseEstimate(new Pose2d(getPoseEstimate().getX(), getPoseEstimate().getY(), heading)); +// } +// +// public static double encoderTicksToInches(double ticks) { +// return WHEEL_RADIUS * 2 * Math.PI * GEAR_RATIO * ticks / TICKS_PER_REV; +// } +// +// @NonNull +// @Override +// public List getWheelPositions() { +// int leftPos = leftEncoder.getCurrentPosition(); +// int rightPos = rightEncoder.getCurrentPosition(); +// int frontPos = strafeEncoder.getCurrentPosition(); +// +// lastEncPositions.clear(); +// lastEncPositions.add(leftPos); +// lastEncPositions.add(rightPos); +// lastEncPositions.add(frontPos); +// +// return Arrays.asList( +// encoderTicksToInches(leftPos) * X_MULTIPLIER, +// encoderTicksToInches(rightPos) * X_MULTIPLIER, +// encoderTicksToInches(frontPos) * Y_MULTIPLIER +// ); +// } +// +// @NonNull +// @Override +// public List getWheelVelocities() { +// int leftVel = (int) leftEncoder.getCorrectedVelocity(); +// int rightVel = (int) rightEncoder.getCorrectedVelocity(); +// int frontVel = (int) strafeEncoder.getCorrectedVelocity(); +// +// lastEncVels.clear(); +// lastEncVels.add(leftVel); +// lastEncVels.add(rightVel); +// lastEncVels.add(frontVel); +// +// return Arrays.asList( +// encoderTicksToInches(leftVel) * X_MULTIPLIER, +// encoderTicksToInches(rightVel) * X_MULTIPLIER, +// encoderTicksToInches(frontVel) * Y_MULTIPLIER +// ); +// } +//} diff --git 
a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/localizers/ThreeWheelIMULocalizer.java b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/localizers/ThreeWheelIMULocalizer.java new file mode 100644 index 0000000..83c172e --- /dev/null +++ b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/localizers/ThreeWheelIMULocalizer.java @@ -0,0 +1,326 @@ +package org.firstinspires.ftc.teamcode.pedroPathing.localization.localizers; + +import com.acmerobotics.dashboard.config.Config; +import com.qualcomm.hardware.rev.RevHubOrientationOnRobot; +import com.qualcomm.robotcore.hardware.DcMotorEx; +import com.qualcomm.robotcore.hardware.HardwareMap; +import com.qualcomm.robotcore.hardware.IMU; + +import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit; +import org.firstinspires.ftc.teamcode.pedroPathing.localization.Encoder; +import org.firstinspires.ftc.teamcode.pedroPathing.localization.Localizer; +import org.firstinspires.ftc.teamcode.pedroPathing.localization.Matrix; +import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose; +import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.MathFunctions; +import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector; +import org.firstinspires.ftc.teamcode.pedroPathing.util.NanoTimer; + +/** + * This is the ThreeWheelIMULocalizer class. This class extends the Localizer superclass and is a + * localizer that uses the three wheel odometry set up with the IMU to have more accurate heading + * readings. The diagram below, which is modified from Road Runner, shows a typical set up. + * + * The view is from the top of the robot looking downwards. + * + * left on robot is the y positive direction + * + * forward on robot is the x positive direction + * + * forward (x positive) + * △ + * | + * | + * /--------------\ + * | | + * | | + * | || || | + * left (y positive) <--- | || || | + * | ____ | + * | ---- | + * \--------------/ + * + * @author Logan Nash + * @author Anyi Lin - 10158 Scott's Bots + * @version 1.0, 7/9/2024 + */ +@Config +public class ThreeWheelIMULocalizer extends Localizer { + private HardwareMap hardwareMap; + private Pose startPose; + private Pose displacementPose; + private Pose currentVelocity; + private Matrix prevRotationMatrix; + private NanoTimer timer; + private long deltaTimeNano; + private Encoder leftEncoder; + private Encoder rightEncoder; + private Encoder strafeEncoder; + private Pose leftEncoderPose; + private Pose rightEncoderPose; + private Pose strafeEncoderPose; + + public final IMU imu; + private double previousIMUOrientation; + private double deltaRadians; + private double totalHeading; + public static double FORWARD_TICKS_TO_INCHES = 0.002957;//8192 * 1.37795 * 2 * Math.PI * 0.5008239963; + public static double STRAFE_TICKS_TO_INCHES = -0.003127403096038503;//8192 * 1.37795 * 2 * Math.PI * 0.5018874659; + public static double TURN_TICKS_TO_RADIANS = 0.002995;//8192 * 1.37795 * 2 * Math.PI * 0.5; + + public static boolean useIMU = true; + + /** + * This creates a new ThreeWheelIMULocalizer from a HardwareMap, with a starting Pose at (0,0) + * facing 0 heading. + * + * @param map the HardwareMap + */ + public ThreeWheelIMULocalizer(HardwareMap map) { + this(map, new Pose()); + } + + /** + * This creates a new ThreeWheelIMULocalizer from a HardwareMap and a Pose, with the Pose + * specifying the starting pose of the localizer. 
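+     * <p>A minimal usage sketch (the start pose values and loop structure are illustrative only):
+     * <pre>{@code
+     * ThreeWheelIMULocalizer localizer =
+     *         new ThreeWheelIMULocalizer(hardwareMap, new Pose(36, 72, Math.toRadians(90)));
+     * // each loop iteration:
+     * localizer.update();
+     * Pose current = localizer.getPose(); // startPose plus the accumulated displacement
+     * }</pre>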
+ * + * @param map the HardwareMap + * @param setStartPose the Pose to start from + */ + public ThreeWheelIMULocalizer(HardwareMap map, Pose setStartPose) { + hardwareMap = map; + imu = hardwareMap.get(IMU.class, "imu"); + + // TODO: replace this with your IMU's orientation + imu.initialize(new IMU.Parameters(new RevHubOrientationOnRobot(RevHubOrientationOnRobot.LogoFacingDirection.LEFT, RevHubOrientationOnRobot.UsbFacingDirection.UP))); + + // TODO: replace these with your encoder positions + leftEncoderPose = new Pose(-3, 5.7, 0); + rightEncoderPose = new Pose(-3, -5.7, 0); + strafeEncoderPose = new Pose(6.9, 1, Math.toRadians(90)); + + + // TODO: replace these with your encoder ports + leftEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, "lb")); + rightEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, "lf")); + strafeEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, "rf")); + + // TODO: reverse any encoders necessary + leftEncoder.setDirection(Encoder.REVERSE); + rightEncoder.setDirection(Encoder.FORWARD); + strafeEncoder.setDirection(Encoder.FORWARD); + + setStartPose(setStartPose); + timer = new NanoTimer(); + deltaTimeNano = 1; + displacementPose = new Pose(); + currentVelocity = new Pose(); + totalHeading = 0; + + resetEncoders(); + } + + /** + * This returns the current pose estimate. + * + * @return returns the current pose estimate as a Pose + */ + @Override + public Pose getPose() { + return MathFunctions.addPoses(startPose, displacementPose); + } + + /** + * This returns the current velocity estimate. + * + * @return returns the current velocity estimate as a Pose + */ + @Override + public Pose getVelocity() { + return currentVelocity.copy(); + } + + /** + * This returns the current velocity estimate. + * + * @return returns the current velocity estimate as a Vector + */ + @Override + public Vector getVelocityVector() { + return currentVelocity.getVector(); + } + + /** + * This sets the start pose. Changing the start pose should move the robot as if all its + * previous movements were displacing it from its new start pose. + * + * @param setStart the new start pose + */ + @Override + public void setStartPose(Pose setStart) { + startPose = setStart; + } + + /** + * This sets the Matrix that contains the previous pose's heading rotation. + * + * @param heading the rotation of the Matrix + */ + public void setPrevRotationMatrix(double heading) { + prevRotationMatrix = new Matrix(3,3); + prevRotationMatrix.set(0, 0, Math.cos(heading)); + prevRotationMatrix.set(0, 1, -Math.sin(heading)); + prevRotationMatrix.set(1, 0, Math.sin(heading)); + prevRotationMatrix.set(1, 1, Math.cos(heading)); + prevRotationMatrix.set(2, 2, 1.0); + } + + /** + * This sets the current pose estimate. Changing this should just change the robot's current + * pose estimate, not anything to do with the start pose. + * + * @param setPose the new current pose estimate + */ + @Override + public void setPose(Pose setPose) { + displacementPose = MathFunctions.subtractPoses(setPose, startPose); + resetEncoders(); + } + + /** + * This updates the elapsed time timer that keeps track of time between updates, as well as the + * change position of the Encoders. Then, the robot's global change in position is calculated + * using the pose exponential method. 
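+     * <p>In outline (matching the code below): the robot-relative deltas (dx, dy, dtheta) are passed through
+     * the pose exponential matrix, whose in-plane entries are sin(dtheta)/dtheta and (1 - cos(dtheta))/dtheta
+     * (with a Taylor fallback for very small dtheta), and the result is rotated by the previous heading to
+     * produce field-relative deltas that are added onto the displacement pose.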
+ */ + @Override + public void update() { + deltaTimeNano = timer.getElapsedTime(); + timer.resetTimer(); + + updateEncoders(); + Matrix robotDeltas = getRobotDeltas(); + Matrix globalDeltas; + setPrevRotationMatrix(getPose().getHeading()); + + Matrix transformation = new Matrix(3,3); + if (Math.abs(robotDeltas.get(2, 0)) < 0.001) { + transformation.set(0, 0, 1.0 - (Math.pow(robotDeltas.get(2, 0), 2) / 6.0)); + transformation.set(0, 1, -robotDeltas.get(2, 0) / 2.0); + transformation.set(1, 0, robotDeltas.get(2, 0) / 2.0); + transformation.set(1, 1, 1.0 - (Math.pow(robotDeltas.get(2, 0), 2) / 6.0)); + transformation.set(2, 2, 1.0); + } else { + transformation.set(0, 0, Math.sin(robotDeltas.get(2, 0)) / robotDeltas.get(2, 0)); + transformation.set(0, 1, (Math.cos(robotDeltas.get(2, 0)) - 1.0) / robotDeltas.get(2, 0)); + transformation.set(1, 0, (1.0 - Math.cos(robotDeltas.get(2, 0))) / robotDeltas.get(2, 0)); + transformation.set(1, 1, Math.sin(robotDeltas.get(2, 0)) / robotDeltas.get(2, 0)); + transformation.set(2, 2, 1.0); + } + + globalDeltas = Matrix.multiply(Matrix.multiply(prevRotationMatrix, transformation), robotDeltas); + + displacementPose.add(new Pose(globalDeltas.get(0, 0), globalDeltas.get(1, 0), globalDeltas.get(2, 0))); + currentVelocity = new Pose(globalDeltas.get(0, 0) / (deltaTimeNano / Math.pow(10.0, 9)), globalDeltas.get(1, 0) / (deltaTimeNano / Math.pow(10.0, 9)), globalDeltas.get(2, 0) / (deltaTimeNano / Math.pow(10.0, 9))); + + totalHeading += globalDeltas.get(2, 0); + } + + /** + * This updates the Encoders. + */ + public void updateEncoders() { + leftEncoder.update(); + rightEncoder.update(); + strafeEncoder.update(); + + double currentIMUOrientation = MathFunctions.normalizeAngle(imu.getRobotYawPitchRollAngles().getYaw(AngleUnit.RADIANS)); + deltaRadians = MathFunctions.getTurnDirection(previousIMUOrientation, currentIMUOrientation) * MathFunctions.getSmallestAngleDifference(currentIMUOrientation, previousIMUOrientation); + previousIMUOrientation = currentIMUOrientation; + } + + /** + * This resets the Encoders. + */ + public void resetEncoders() { + leftEncoder.reset(); + rightEncoder.reset(); + strafeEncoder.reset(); + } + + /** + * This calculates the change in position from the perspective of the robot using information + * from the Encoders. + * + * @return returns a Matrix containing the robot relative movement. + */ + public Matrix getRobotDeltas() { + Matrix returnMatrix = new Matrix(3,1); + // x/forward movement + returnMatrix.set(0,0, FORWARD_TICKS_TO_INCHES * ((rightEncoder.getDeltaPosition() * leftEncoderPose.getY() - leftEncoder.getDeltaPosition() * rightEncoderPose.getY()) / (leftEncoderPose.getY() - rightEncoderPose.getY()))); + //y/strafe movement + returnMatrix.set(1,0, STRAFE_TICKS_TO_INCHES * (strafeEncoder.getDeltaPosition() - strafeEncoderPose.getX() * ((rightEncoder.getDeltaPosition() - leftEncoder.getDeltaPosition()) / (leftEncoderPose.getY() - rightEncoderPose.getY())))); + // theta/turning + if (MathFunctions.getSmallestAngleDifference(0, deltaRadians) > 0.00005 && useIMU) { + returnMatrix.set(2, 0, deltaRadians); + } else { + returnMatrix.set(2,0, TURN_TICKS_TO_RADIANS * ((rightEncoder.getDeltaPosition() - leftEncoder.getDeltaPosition()) / (leftEncoderPose.getY() - rightEncoderPose.getY()))); + } + return returnMatrix; + } + + /** + * This returns how far the robot has turned in radians, in a number not clamped between 0 and + * 2 * pi radians. This is used for some tuning things and nothing actually within the following. 
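+     * For example, two full counter-clockwise rotations would report roughly 4 * pi, or about 12.57 radians.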
+ * + * @return returns how far the robot has turned in total, in radians. + */ + public double getTotalHeading() { + return totalHeading; + } + + /** + * This returns the multiplier applied to forward movement measurement to convert from encoder + * ticks to inches. This is found empirically through a tuner. + * + * @return returns the forward ticks to inches multiplier + */ + public double getForwardMultiplier() { + return FORWARD_TICKS_TO_INCHES; + } + + /** + * This returns the multiplier applied to lateral/strafe movement measurement to convert from + * encoder ticks to inches. This is found empirically through a tuner. + * + * @return returns the lateral/strafe ticks to inches multiplier + */ + public double getLateralMultiplier() { + return STRAFE_TICKS_TO_INCHES; + } + + /** + * This returns the multiplier applied to turning movement measurement to convert from encoder + * ticks to radians. This is found empirically through a tuner. + * + * @return returns the turning ticks to radians multiplier + */ + public double getTurningMultiplier() { + return TURN_TICKS_TO_RADIANS; + } + + /** + * This resets the IMU. + */ + public void resetIMU() { + imu.resetYaw(); + } + + /** + * This is returns the IMU. + * + * @return returns the IMU + */ + @Override + public IMU getIMU() { + return imu; + } +} diff --git a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/localizers/ThreeWheelLocalizer.java b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/localizers/ThreeWheelLocalizer.java index 4b149f7..d5e1347 100644 --- a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/localizers/ThreeWheelLocalizer.java +++ b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/localizers/ThreeWheelLocalizer.java @@ -25,18 +25,18 @@ import org.firstinspires.ftc.teamcode.pedroPathing.util.NanoTimer; * * forward on robot is the x positive direction * - * /--------------\ - * | ____ | - * | ---- | - * | || || | - * | || || | ----> left (y positive) - * | | - * | | - * \--------------/ - * | - * | - * V - * forward (x positive) + * forward (x positive) + * △ + * | + * | + * /--------------\ + * | | + * | | + * | || || | + * left (y positive) <--- | || || | + * | ____ | + * | ---- | + * \--------------/ * * @author Anyi Lin - 10158 Scott's Bots * @version 1.0, 4/2/2024 @@ -206,7 +206,7 @@ public class ThreeWheelLocalizer extends Localizer { globalDeltas = Matrix.multiply(Matrix.multiply(prevRotationMatrix, transformation), robotDeltas); displacementPose.add(new Pose(globalDeltas.get(0, 0), globalDeltas.get(1, 0), globalDeltas.get(2, 0))); - currentVelocity = new Pose(globalDeltas.get(0, 0) / (deltaTimeNano * Math.pow(10.0, 9)), globalDeltas.get(1, 0) / (deltaTimeNano * Math.pow(10.0, 9)), globalDeltas.get(2, 0) / (deltaTimeNano * Math.pow(10.0, 9))); + currentVelocity = new Pose(globalDeltas.get(0, 0) / (deltaTimeNano / Math.pow(10.0, 9)), globalDeltas.get(1, 0) / (deltaTimeNano / Math.pow(10.0, 9)), globalDeltas.get(2, 0) / (deltaTimeNano / Math.pow(10.0, 9))); totalHeading += globalDeltas.get(2, 0); } diff --git a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/localizers/TwoWheelLocalizer.java b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/localizers/TwoWheelLocalizer.java new file mode 100644 index 0000000..72cc63b --- /dev/null +++ 
b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/localizers/TwoWheelLocalizer.java @@ -0,0 +1,309 @@ +package org.firstinspires.ftc.teamcode.pedroPathing.localization.localizers; + +import com.acmerobotics.dashboard.config.Config; +import com.qualcomm.hardware.rev.RevHubOrientationOnRobot; +import com.qualcomm.robotcore.hardware.DcMotorEx; +import com.qualcomm.robotcore.hardware.HardwareMap; +import com.qualcomm.robotcore.hardware.IMU; + +import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit; +import org.firstinspires.ftc.teamcode.pedroPathing.localization.Encoder; +import org.firstinspires.ftc.teamcode.pedroPathing.localization.Localizer; +import org.firstinspires.ftc.teamcode.pedroPathing.localization.Matrix; +import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose; +import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.MathFunctions; +import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector; +import org.firstinspires.ftc.teamcode.pedroPathing.util.NanoTimer; + +/** + * This is the TwoWheelLocalizer class. This class extends the Localizer superclass and is a + * localizer that uses the two wheel odometry with IMU set up. The diagram below, which is modified from + * Road Runner, shows a typical set up. + * + * The view is from the top of the robot looking downwards. + * + * left on robot is the y positive direction + * + * forward on robot is the x positive direction + * +* forward (x positive) + * △ + * | + * | + * /--------------\ + * | | + * | | + * | || | + * left (y positive) <--- | || | + * | ____ | + * | ---- | + * \--------------/ + * + * @author Anyi Lin - 10158 Scott's Bots + * @version 1.0, 4/2/2024 + */ +@Config +public class TwoWheelLocalizer extends Localizer { // todo: make two wheel odo work + private HardwareMap hardwareMap; + private IMU imu; + private Pose startPose; + private Pose displacementPose; + private Pose currentVelocity; + private Matrix prevRotationMatrix; + private NanoTimer timer; + private long deltaTimeNano; + private Encoder forwardEncoder; + private Encoder strafeEncoder; + private Pose forwardEncoderPose; + private Pose strafeEncoderPose; + private double previousIMUOrientation; + private double deltaRadians; + private double totalHeading; + public static double FORWARD_TICKS_TO_INCHES = 8192 * 1.37795 * 2 * Math.PI * 0.5008239963; + public static double STRAFE_TICKS_TO_INCHES = 8192 * 1.37795 * 2 * Math.PI * 0.5018874659; + + /** + * This creates a new TwoWheelLocalizer from a HardwareMap, with a starting Pose at (0,0) + * facing 0 heading. + * + * @param map the HardwareMap + */ + public TwoWheelLocalizer(HardwareMap map) { + this(map, new Pose()); + } + + /** + * This creates a new TwoWheelLocalizer from a HardwareMap and a Pose, with the Pose + * specifying the starting pose of the localizer. 
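+     * <p>With only two tracking wheels, the heading delta comes from the IMU ({@code deltaRadians} in
+     * {@link #updateEncoders()}), while the encoders supply the forward and strafe deltas, corrected for the
+     * arc each pod sweeps as the robot turns (the {@code forwardEncoderPose.getY() * deltaRadians} and
+     * {@code strafeEncoderPose.getX() * deltaRadians} terms in {@link #getRobotDeltas()}).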
+ * + * @param map the HardwareMap + * @param setStartPose the Pose to start from + */ + public TwoWheelLocalizer(HardwareMap map, Pose setStartPose) { + // TODO: replace these with your encoder positions + forwardEncoderPose = new Pose(-18.5/25.4 - 0.1, 164.4/25.4, 0); + strafeEncoderPose = new Pose(-107.9/25.4+0.25, -1.1/25.4-0.23, Math.toRadians(90)); + + hardwareMap = map; + + imu = hardwareMap.get(IMU.class, "imu"); + // TODO: replace this with your IMU's orientation + imu.initialize(new IMU.Parameters(new RevHubOrientationOnRobot(RevHubOrientationOnRobot.LogoFacingDirection.UP, RevHubOrientationOnRobot.UsbFacingDirection.LEFT))); + + // TODO: replace these with your encoder ports + forwardEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, "leftRear")); + strafeEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, "strafeEncoder")); + + // TODO: reverse any encoders necessary + forwardEncoder.setDirection(Encoder.REVERSE); + strafeEncoder.setDirection(Encoder.FORWARD); + + setStartPose(setStartPose); + timer = new NanoTimer(); + deltaTimeNano = 1; + displacementPose = new Pose(); + currentVelocity = new Pose(); + + previousIMUOrientation = MathFunctions.normalizeAngle(imu.getRobotYawPitchRollAngles().getYaw(AngleUnit.RADIANS)); + deltaRadians = 0; + } + + /** + * This returns the current pose estimate. + * + * @return returns the current pose estimate as a Pose + */ + @Override + public Pose getPose() { + return MathFunctions.addPoses(startPose, displacementPose); + } + + /** + * This returns the current velocity estimate. + * + * @return returns the current velocity estimate as a Pose + */ + @Override + public Pose getVelocity() { + return currentVelocity.copy(); + } + + /** + * This returns the current velocity estimate. + * + * @return returns the current velocity estimate as a Vector + */ + @Override + public Vector getVelocityVector() { + return currentVelocity.getVector(); + } + + /** + * This sets the start pose. Changing the start pose should move the robot as if all its + * previous movements were displacing it from its new start pose. + * + * @param setStart the new start pose + */ + @Override + public void setStartPose(Pose setStart) { + startPose = setStart; + } + + /** + * This sets the Matrix that contains the previous pose's heading rotation. + * + * @param heading the rotation of the Matrix + */ + public void setPrevRotationMatrix(double heading) { + prevRotationMatrix = new Matrix(3,3); + prevRotationMatrix.set(0, 0, Math.cos(heading)); + prevRotationMatrix.set(0, 1, -Math.sin(heading)); + prevRotationMatrix.set(1, 0, Math.sin(heading)); + prevRotationMatrix.set(1, 1, Math.cos(heading)); + prevRotationMatrix.set(2, 2, 1.0); + } + + /** + * This sets the current pose estimate. Changing this should just change the robot's current + * pose estimate, not anything to do with the start pose. + * + * @param setPose the new current pose estimate + */ + @Override + public void setPose(Pose setPose) { + displacementPose = MathFunctions.subtractPoses(setPose, startPose); + resetEncoders(); + } + + /** + * This updates the elapsed time timer that keeps track of time between updates, as well as the + * change position of the Encoders and the IMU readings. Then, the robot's global change in + * position is calculated using the pose exponential method. 
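+     * <p>When the heading change is very small (under 0.001 rad), the sin(dtheta)/dtheta and
+     * (1 - cos(dtheta))/dtheta entries are replaced by their Taylor approximations, 1 - dtheta^2/6 and
+     * dtheta/2, to avoid dividing by a near-zero angle.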
+ */ + @Override + public void update() { + deltaTimeNano = timer.getElapsedTime(); + timer.resetTimer(); + + updateEncoders(); + Matrix robotDeltas = getRobotDeltas(); + Matrix globalDeltas; + setPrevRotationMatrix(getPose().getHeading()); + + Matrix transformation = new Matrix(3,3); + if (Math.abs(robotDeltas.get(2, 0)) < 0.001) { + transformation.set(0, 0, 1.0 - (Math.pow(robotDeltas.get(2, 0), 2) / 6.0)); + transformation.set(0, 1, -robotDeltas.get(2, 0) / 2.0); + transformation.set(1, 0, robotDeltas.get(2, 0) / 2.0); + transformation.set(1, 1, 1.0 - (Math.pow(robotDeltas.get(2, 0), 2) / 6.0)); + transformation.set(2, 2, 1.0); + } else { + transformation.set(0, 0, Math.sin(robotDeltas.get(2, 0)) / robotDeltas.get(2, 0)); + transformation.set(0, 1, (Math.cos(robotDeltas.get(2, 0)) - 1.0) / robotDeltas.get(2, 0)); + transformation.set(1, 0, (1.0 - Math.cos(robotDeltas.get(2, 0))) / robotDeltas.get(2, 0)); + transformation.set(1, 1, Math.sin(robotDeltas.get(2, 0)) / robotDeltas.get(2, 0)); + transformation.set(2, 2, 1.0); + } + + globalDeltas = Matrix.multiply(Matrix.multiply(prevRotationMatrix, transformation), robotDeltas); + + displacementPose.add(new Pose(globalDeltas.get(0, 0), globalDeltas.get(1, 0), globalDeltas.get(2, 0))); + currentVelocity = new Pose(globalDeltas.get(0, 0) / (deltaTimeNano / Math.pow(10.0, 9)), globalDeltas.get(1, 0) / (deltaTimeNano / Math.pow(10.0, 9)), globalDeltas.get(2, 0) / (deltaTimeNano / Math.pow(10.0, 9))); + + totalHeading += globalDeltas.get(2, 0); + } + + /** + * This updates the Encoders as well as the IMU. + */ + public void updateEncoders() { + forwardEncoder.update(); + strafeEncoder.update(); + + double currentIMUOrientation = MathFunctions.normalizeAngle(imu.getRobotYawPitchRollAngles().getYaw(AngleUnit.RADIANS)); + deltaRadians = MathFunctions.getTurnDirection(previousIMUOrientation, currentIMUOrientation) * MathFunctions.getSmallestAngleDifference(currentIMUOrientation, previousIMUOrientation); + previousIMUOrientation = currentIMUOrientation; + } + + /** + * This resets the Encoders. + */ + public void resetEncoders() { + forwardEncoder.reset(); + strafeEncoder.reset(); + } + + /** + * This calculates the change in position from the perspective of the robot using information + * from the Encoders and IMU. + * + * @return returns a Matrix containing the robot relative movement. + */ + public Matrix getRobotDeltas() { + Matrix returnMatrix = new Matrix(3,1); + // x/forward movement + returnMatrix.set(0,0, FORWARD_TICKS_TO_INCHES * (forwardEncoder.getDeltaPosition() - forwardEncoderPose.getY() * deltaRadians)); + //y/strafe movement + returnMatrix.set(1,0, STRAFE_TICKS_TO_INCHES * (strafeEncoder.getDeltaPosition() - strafeEncoderPose.getX() * deltaRadians)); + // theta/turning + returnMatrix.set(2,0, deltaRadians); + return returnMatrix; + } + + /** + * This returns how far the robot has turned in radians, in a number not clamped between 0 and + * 2 * pi radians. This is used for some tuning things and nothing actually within the following. + * + * @return returns how far the robot has turned in total, in radians. + */ + public double getTotalHeading() { + return totalHeading; + } + + /** + * This returns the multiplier applied to forward movement measurement to convert from encoder + * ticks to inches. This is found empirically through a tuner. 
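+     * For example (illustrative numbers only): if the robot is pushed a true 48 inches but the localizer
+     * only reports 40, the multiplier should be scaled by 48/40; the ForwardTuner OpMode further down in
+     * this change prints that scaled value directly.
+     * <pre>{@code
+     * double measured = 40.0;                                       // inches the localizer reported
+     * double corrected = FORWARD_TICKS_TO_INCHES * 48.0 / measured; // new multiplier to paste back in
+     * }</pre>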
+ * + * @return returns the forward ticks to inches multiplier + */ + public double getForwardMultiplier() { + return FORWARD_TICKS_TO_INCHES; + } + + /** + * This returns the multiplier applied to lateral/strafe movement measurement to convert from + * encoder ticks to inches. This is found empirically through a tuner. + * + * @return returns the lateral/strafe ticks to inches multiplier + */ + public double getLateralMultiplier() { + return STRAFE_TICKS_TO_INCHES; + } + + /** + * This returns the multiplier applied to turning movement measurement to convert from encoder + * ticks to radians. This is found empirically through a tuner. + * + * @return returns the turning ticks to radians multiplier + */ + public double getTurningMultiplier() { + return 1; + } + + /** + * This resets the IMU. + */ + public void resetIMU() { + imu.resetYaw(); + } + + /** + * This is returns the IMU. + * + * @return returns the IMU + */ + @Override + public IMU getIMU() { + return imu; + } +} diff --git a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/localizers/TwoWheelPinpointIMULocalizer.java b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/localizers/TwoWheelPinpointIMULocalizer.java new file mode 100644 index 0000000..9858caa --- /dev/null +++ b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/localizers/TwoWheelPinpointIMULocalizer.java @@ -0,0 +1,321 @@ +package org.firstinspires.ftc.teamcode.pedroPathing.localization.localizers; + +import com.acmerobotics.dashboard.config.Config; +import com.qualcomm.robotcore.hardware.DcMotorEx; +import com.qualcomm.robotcore.hardware.HardwareMap; + +import org.firstinspires.ftc.teamcode.pedroPathing.localization.Encoder; +import org.firstinspires.ftc.teamcode.pedroPathing.localization.GoBildaPinpointDriver; +import org.firstinspires.ftc.teamcode.pedroPathing.localization.Localizer; +import org.firstinspires.ftc.teamcode.pedroPathing.localization.Matrix; +import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose; +import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.MathFunctions; +import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector; +import org.firstinspires.ftc.teamcode.pedroPathing.util.NanoTimer; + + +/** + * This is the TwoWheelPinpointIMULocalizer class. This class extends the Localizer superclass and is a + * localizer that uses the two wheel odometry with the Pinpoint IMU set up. The diagram below, which is modified from + * Road Runner, shows a typical set up. + * + * The view is from the top of the robot looking downwards. 
+ * + * left on robot is the y positive direction + * + * forward on robot is the x positive direction + * +* forward (x positive) + * △ + * | + * | + * /--------------\ + * | | + * | | + * | || | + * left (y positive) <--- | || | + * | ____ | + * | ---- | + * \--------------/ + * + * @author Anyi Lin - 10158 Scott's Bots + * @author Havish Sripada - 12808 RevAmped Robotics + * @author Baron Henderson - 20077 The Indubitables + * @version 1.0, 11/28/2024 + */ +@Config +public class TwoWheelPinpointIMULocalizer extends Localizer { + private HardwareMap hardwareMap; + private GoBildaPinpointDriver pinpoint; + private Pose startPose; + private Pose displacementPose; + private Pose currentVelocity; + private Matrix prevRotationMatrix; + private NanoTimer timer; + private long deltaTimeNano; + private Encoder forwardEncoder; + private Encoder strafeEncoder; + private Pose forwardEncoderPose; + private Pose strafeEncoderPose; + private double previousHeading; + private double deltaRadians; + private double totalHeading; + public static double FORWARD_TICKS_TO_INCHES = 8192 * 1.37795 * 2 * Math.PI * 0.5008239963; + public static double STRAFE_TICKS_TO_INCHES = 8192 * 1.37795 * 2 * Math.PI * 0.5018874659; + + /** + * This creates a new TwoWheelLocalizer from a HardwareMap, with a starting Pose at (0,0) + * facing 0 heading. + * + * @param map the HardwareMap + */ + public TwoWheelPinpointIMULocalizer(HardwareMap map) { + this(map, new Pose()); + } + + /** + * This creates a new TwoWheelLocalizer from a HardwareMap and a Pose, with the Pose + * specifying the starting pose of the localizer. + * + * @param map the HardwareMap + * @param setStartPose the Pose to start from + */ + public TwoWheelPinpointIMULocalizer(HardwareMap map, Pose setStartPose) { + // TODO: replace these with your encoder positions + forwardEncoderPose = new Pose(-0.82, 6.47, 0); + strafeEncoderPose = new Pose(-4, -0.273, Math.toRadians(90)); + + hardwareMap = map; + + pinpoint = hardwareMap.get(GoBildaPinpointDriver.class, "pinpoint"); + pinpoint.resetPosAndIMU(); + + // TODO: replace these with your encoder ports + forwardEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, "leftRear")); + strafeEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, "strafeEncoder")); + + // TODO: reverse any encoders necessary + forwardEncoder.setDirection(Encoder.REVERSE); + strafeEncoder.setDirection(Encoder.FORWARD); + + setStartPose(setStartPose); + timer = new NanoTimer(); + deltaTimeNano = 1; + displacementPose = new Pose(); + currentVelocity = new Pose(); + previousHeading = startPose.getHeading(); + deltaRadians = 0; + } + + /** + * This returns the current pose estimate. + * + * @return returns the current pose estimate as a Pose + */ + @Override + public Pose getPose() { + return new Pose(startPose.getX()+displacementPose.getX(), startPose.getY()+displacementPose.getY(),displacementPose.getHeading()); + } + + /** + * This returns the current velocity estimate. + * + * @return returns the current velocity estimate as a Pose + */ + @Override + public Pose getVelocity() { + return currentVelocity.copy(); + } + + /** + * This returns the current velocity estimate. + * + * @return returns the current velocity estimate as a Vector + */ + @Override + public Vector getVelocityVector() { + return currentVelocity.getVector(); + } + + /** + * This sets the start pose. Changing the start pose should move the robot as if all its + * previous movements were displacing it from its new start pose. 
+ * + * @param setStart the new start pose + */ + @Override + public void setStartPose(Pose setStart) { + startPose = setStart; + } + + /** + * This sets the Matrix that contains the previous pose's heading rotation. + * + * @param heading the rotation of the Matrix + */ + public void setPrevRotationMatrix(double heading) { + prevRotationMatrix = new Matrix(3, 3); + prevRotationMatrix.set(0, 0, Math.cos(heading)); + prevRotationMatrix.set(0, 1, -Math.sin(heading)); + prevRotationMatrix.set(1, 0, Math.sin(heading)); + prevRotationMatrix.set(1, 1, Math.cos(heading)); + prevRotationMatrix.set(2, 2, 1.0); + } + + /** + * This sets the current pose estimate. Changing this should just change the robot's current + * pose estimate, not anything to do with the start pose. + * + * @param setPose the new current pose estimate + */ + @Override + public void setPose(Pose setPose) { + displacementPose = MathFunctions.subtractPoses(setPose, startPose); + resetEncoders(); + } + + /** + * This updates the elapsed time timer that keeps track of time between updates, as well as the + * change position of the Encoders and the IMU readings. Then, the robot's global change in + * position is calculated using the pose exponential method. + */ + @Override + public void update() { + deltaTimeNano = timer.getElapsedTime(); + timer.resetTimer(); + + updateEncoders(); + Matrix robotDeltas = getRobotDeltas(); + Matrix globalDeltas; + setPrevRotationMatrix(getPose().getHeading()); + + Matrix transformation = new Matrix(3, 3); + if (Math.abs(robotDeltas.get(2, 0)) < 0.001) { + transformation.set(0, 0, 1.0 - (Math.pow(robotDeltas.get(2, 0), 2) / 6.0)); + transformation.set(0, 1, -robotDeltas.get(2, 0) / 2.0); + transformation.set(1, 0, robotDeltas.get(2, 0) / 2.0); + transformation.set(1, 1, 1.0 - (Math.pow(robotDeltas.get(2, 0), 2) / 6.0)); + transformation.set(2, 2, 1.0); + } else { + transformation.set(0, 0, Math.sin(robotDeltas.get(2, 0)) / robotDeltas.get(2, 0)); + transformation.set(0, 1, (Math.cos(robotDeltas.get(2, 0)) - 1.0) / robotDeltas.get(2, 0)); + transformation.set(1, 0, (1.0 - Math.cos(robotDeltas.get(2, 0))) / robotDeltas.get(2, 0)); + transformation.set(1, 1, Math.sin(robotDeltas.get(2, 0)) / robotDeltas.get(2, 0)); + transformation.set(2, 2, 1.0); + } + + globalDeltas = Matrix.multiply(Matrix.multiply(prevRotationMatrix, transformation), robotDeltas); + + displacementPose.add(new Pose(globalDeltas.get(0, 0), globalDeltas.get(1, 0), globalDeltas.get(2, 0))); + currentVelocity = new Pose(globalDeltas.get(0, 0) / (deltaTimeNano / Math.pow(10.0, 9)), globalDeltas.get(1, 0) / (deltaTimeNano / Math.pow(10.0, 9)), globalDeltas.get(2, 0) / (deltaTimeNano / Math.pow(10.0, 9))); + + totalHeading += globalDeltas.get(2, 0); + } + + /** + * This updates the Encoders as well as the IMU. + */ + public void updateEncoders() { + forwardEncoder.update(); + strafeEncoder.update(); + + pinpoint.update(GoBildaPinpointDriver.readData.ONLY_UPDATE_HEADING); + double currentHeading = startPose.getHeading() + MathFunctions.normalizeAngle(pinpoint.getHeading()); + deltaRadians = MathFunctions.getTurnDirection(previousHeading, currentHeading) * MathFunctions.getSmallestAngleDifference(currentHeading, previousHeading); + previousHeading = currentHeading; + } + + /** + * This resets the Encoders. + */ + public void resetEncoders() { + forwardEncoder.reset(); + strafeEncoder.reset(); + } + + /** + * This calculates the change in position from the perspective of the robot using information + * from the Encoders and IMU. 
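+     * <p>Concretely (matching the code below): the forward entry is FORWARD_TICKS_TO_INCHES * (forward pod
+     * delta - forwardEncoderPose.getY() * deltaRadians), the strafe entry is STRAFE_TICKS_TO_INCHES *
+     * (strafe pod delta - strafeEncoderPose.getX() * deltaRadians), and the heading entry is deltaRadians,
+     * which comes from the Pinpoint heading in {@link #updateEncoders()}.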
+ * + * @return returns a Matrix containing the robot relative movement. + */ + public Matrix getRobotDeltas() { + Matrix returnMatrix = new Matrix(3, 1); + // x/forward movement + returnMatrix.set(0, 0, FORWARD_TICKS_TO_INCHES * (forwardEncoder.getDeltaPosition() - forwardEncoderPose.getY() * deltaRadians)); + //y/strafe movement + returnMatrix.set(1, 0, STRAFE_TICKS_TO_INCHES * (strafeEncoder.getDeltaPosition() - strafeEncoderPose.getX() * deltaRadians)); + // theta/turning + returnMatrix.set(2, 0, deltaRadians); + return returnMatrix; + } + + /** + * This returns how far the robot has turned in radians, in a number not clamped between 0 and + * 2 * pi radians. This is used for some tuning things and nothing actually within the following. + * + * @return returns how far the robot has turned in total, in radians. + */ + public double getTotalHeading() { + return totalHeading; + } + + /** + * This returns the multiplier applied to forward movement measurement to convert from encoder + * ticks to inches. This is found empirically through a tuner. + * + * @return returns the forward ticks to inches multiplier + */ + public double getForwardMultiplier() { + return FORWARD_TICKS_TO_INCHES; + } + + /** + * This returns the multiplier applied to lateral/strafe movement measurement to convert from + * encoder ticks to inches. This is found empirically through a tuner. + * + * @return returns the lateral/strafe ticks to inches multiplier + */ + public double getLateralMultiplier() { + return STRAFE_TICKS_TO_INCHES; + } + + /** + * This returns the multiplier applied to turning movement measurement to convert from encoder + * ticks to radians. This is found empirically through a tuner. + * + * @return returns the turning ticks to radians multiplier + */ + public double getTurningMultiplier() { + return 1; + } + + /** + * This resets the IMU. + */ + + @Override + public void resetIMU() throws InterruptedException { + pinpoint.recalibrateIMU(); + + try { + Thread.sleep(300); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } + + /** + * This resets the pinpoint. + */ + private void resetPinpoint() throws InterruptedException{ + pinpoint.resetPosAndIMU(); + + try { + Thread.sleep(300); + } catch (InterruptedException e) { + throw new RuntimeException(e); + } + } +} + diff --git a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/tuning/ForwardTuner.java b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/tuning/ForwardTuner.java new file mode 100644 index 0000000..d876746 --- /dev/null +++ b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/tuning/ForwardTuner.java @@ -0,0 +1,71 @@ +package org.firstinspires.ftc.teamcode.pedroPathing.localization.tuning; + +import com.acmerobotics.dashboard.FtcDashboard; +import com.acmerobotics.dashboard.config.Config; +import com.acmerobotics.dashboard.telemetry.MultipleTelemetry; +import com.qualcomm.robotcore.eventloop.opmode.Autonomous; +import com.qualcomm.robotcore.eventloop.opmode.OpMode; + +import org.firstinspires.ftc.robotcore.external.Telemetry; +import org.firstinspires.ftc.teamcode.pedroPathing.localization.PoseUpdater; +import org.firstinspires.ftc.teamcode.pedroPathing.util.DashboardPoseTracker; +import org.firstinspires.ftc.teamcode.pedroPathing.util.Drawing; + +/** + * This is the ForwardTuner OpMode. This tracks the forward movement of the robot and displays the + * necessary ticks to inches multiplier. 
This displayed multiplier is what's necessary to scale the + * robot's current distance in ticks to the specified distance in inches. So, to use this, run the + * tuner, then pull/push the robot to the specified distance using a ruler on the ground. When you're + * at the end of the distance, record the ticks to inches multiplier. Feel free to run multiple trials + * and average the results. Then, input the multiplier into the forward ticks to inches in your + * localizer of choice. + * You can adjust the target distance on FTC Dashboard: 192/168/43/1:8080/dash + * + * @author Anyi Lin - 10158 Scott's Bots + * @version 1.0, 5/6/2024 + */ +@Config +@Autonomous(name = "Forward Localizer Tuner", group = "Autonomous Pathing Tuning") +public class ForwardTuner extends OpMode { + private PoseUpdater poseUpdater; + private DashboardPoseTracker dashboardPoseTracker; + + private Telemetry telemetryA; + + public static double DISTANCE = 48; + + /** + * This initializes the PoseUpdater as well as the FTC Dashboard telemetry. + */ + @Override + public void init() { + poseUpdater = new PoseUpdater(hardwareMap); + + dashboardPoseTracker = new DashboardPoseTracker(poseUpdater); + + telemetryA = new MultipleTelemetry(this.telemetry, FtcDashboard.getInstance().getTelemetry()); + telemetryA.addLine("Pull your robot forward " + DISTANCE + " inches. Your forward ticks to inches will be shown on the telemetry."); + telemetryA.update(); + + Drawing.drawRobot(poseUpdater.getPose(), "#4CAF50"); + Drawing.sendPacket(); + } + + /** + * This updates the robot's pose estimate, and updates the FTC Dashboard telemetry with the + * calculated multiplier and draws the robot. + */ + @Override + public void loop() { + poseUpdater.update(); + + telemetryA.addData("distance moved", poseUpdater.getPose().getX()); + telemetryA.addLine("The multiplier will display what your forward ticks to inches should be to scale your current distance to " + DISTANCE + " inches."); + telemetryA.addData("multiplier", DISTANCE / (poseUpdater.getPose().getX() / poseUpdater.getLocalizer().getForwardMultiplier())); + telemetryA.update(); + + Drawing.drawPoseHistory(dashboardPoseTracker, "#4CAF50"); + Drawing.drawRobot(poseUpdater.getPose(), "#4CAF50"); + Drawing.sendPacket(); + } +} diff --git a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/tuning/LateralTuner.java b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/tuning/LateralTuner.java index f3d42f3..dfa39dc 100644 --- a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/tuning/LateralTuner.java +++ b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/tuning/LateralTuner.java @@ -33,7 +33,7 @@ public class LateralTuner extends OpMode { private Telemetry telemetryA; - public static double DISTANCE = 30; + public static double DISTANCE = 48; /** * This initializes the PoseUpdater as well as the FTC Dashboard telemetry. 
diff --git a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/tuning/LocalizationTest.java b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/tuning/LocalizationTest.java index 45348c2..8da3c3e 100644 --- a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/tuning/LocalizationTest.java +++ b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/tuning/LocalizationTest.java @@ -1,13 +1,13 @@ package org.firstinspires.ftc.teamcode.pedroPathing.localization.tuning; -import static org.firstinspires.ftc.teamcode.PedroConstants.BACK_LEFT_MOTOR_DIRECTION; -import static org.firstinspires.ftc.teamcode.PedroConstants.BACK_RIGHT_MOTOR_DIRECTION; -import static org.firstinspires.ftc.teamcode.PedroConstants.FRONT_LEFT_MOTOR_DIRECTION; -import static org.firstinspires.ftc.teamcode.PedroConstants.FRONT_RIGHT_MOTOR_DIRECTION; import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftFrontMotorName; import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftRearMotorName; import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightFrontMotorName; import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightRearMotorName; +import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftFrontMotorDirection; +import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftRearMotorDirection; +import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightFrontMotorDirection; +import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightRearMotorDirection; import com.acmerobotics.dashboard.FtcDashboard; import com.acmerobotics.dashboard.config.Config; @@ -16,7 +16,6 @@ import com.qualcomm.robotcore.eventloop.opmode.OpMode; import com.qualcomm.robotcore.eventloop.opmode.TeleOp; import com.qualcomm.robotcore.hardware.DcMotor; import com.qualcomm.robotcore.hardware.DcMotorEx; -import com.qualcomm.robotcore.hardware.DcMotorSimple; import com.qualcomm.robotcore.hardware.configuration.typecontainers.MotorConfigurationType; import org.firstinspires.ftc.robotcore.external.Telemetry; @@ -61,11 +60,10 @@ public class LocalizationTest extends OpMode { leftRear = hardwareMap.get(DcMotorEx.class, leftRearMotorName); rightRear = hardwareMap.get(DcMotorEx.class, rightRearMotorName); rightFront = hardwareMap.get(DcMotorEx.class, rightFrontMotorName); - - leftFront.setDirection(FRONT_LEFT_MOTOR_DIRECTION); - leftRear.setDirection(BACK_LEFT_MOTOR_DIRECTION); - rightFront.setDirection(FRONT_RIGHT_MOTOR_DIRECTION); - rightRear.setDirection(BACK_RIGHT_MOTOR_DIRECTION); + leftFront.setDirection(leftFrontMotorDirection); + leftRear.setDirection(leftRearMotorDirection); + rightFront.setDirection(rightFrontMotorDirection); + rightRear.setDirection(rightRearMotorDirection); motors = Arrays.asList(leftFront, leftRear, rightFront, rightRear); diff --git a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/tuning/SensorGoBildaPinpointExample.java b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/tuning/SensorGoBildaPinpointExample.java new file mode 100644 index 0000000..9fbac95 --- /dev/null +++ b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/tuning/SensorGoBildaPinpointExample.java @@ -0,0 +1,200 @@ +/* MIT License + * 
Copyright (c) [2024] [Base 10 Assets, LLC] + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to deal + * in the Software without restriction, including without limitation the rights + * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + * copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + + * The above copyright notice and this permission notice shall be included in all + * copies or substantial portions of the Software. + + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + * SOFTWARE. + */ + +package org.firstinspires.ftc.teamcode.pedroPathing.localization.tuning; + +import com.qualcomm.robotcore.eventloop.opmode.Disabled; +import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode; +import com.qualcomm.robotcore.eventloop.opmode.TeleOp; + +import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit; +import org.firstinspires.ftc.robotcore.external.navigation.DistanceUnit; +import org.firstinspires.ftc.robotcore.external.navigation.Pose2D; +import org.firstinspires.ftc.teamcode.pedroPathing.localization.GoBildaPinpointDriver; + +import java.util.Locale; + +/* +This opmode shows how to use the goBILDA® Pinpoint Odometry Computer. +The goBILDA Odometry Computer is a device designed to solve the Pose Exponential calculation +commonly associated with Dead Wheel Odometry systems. It reads two encoders, and an integrated +system of senors to determine the robot's current heading, X position, and Y position. + +it uses an ESP32-S3 as a main cpu, with an STM LSM6DSV16X IMU. +It is validated with goBILDA "Dead Wheel" Odometry pods, but should be compatible with any +quadrature rotary encoder. The ESP32 PCNT peripheral is speced to decode quadrature encoder signals +at a maximum of 40mhz per channel. Though the maximum in-application tested number is 130khz. + +The device expects two perpendicularly mounted Dead Wheel pods. The encoder pulses are translated +into mm and their readings are transformed by an "offset", this offset describes how far away +the pods are from the "tracking point", usually the center of rotation of the robot. + +Dead Wheel pods should both increase in count when moved forwards and to the left. +The gyro will report an increase in heading when rotated counterclockwise. + +The Pose Exponential algorithm used is described on pg 181 of this book: +https://github.com/calcmogul/controls-engineering-in-frc + +For support, contact tech@gobilda.com + +-Ethan Doak + */ + +//TODO: If tuning comment out the @Disabled +@TeleOp(name="goBILDA® PinPoint Odometry Example", group="Linear OpMode") +@Disabled + +public class SensorGoBildaPinpointExample extends LinearOpMode { + + GoBildaPinpointDriver odo; // Declare OpMode member for the Odometry Computer + + double oldTime = 0; + + + @Override + public void runOpMode() { + + // Initialize the hardware variables. 
Note that the strings used here must correspond + // to the names assigned during the robot configuration step on the DS or RC devices. + + odo = hardwareMap.get(GoBildaPinpointDriver.class,"odo"); + + /* + Set the odometry pod positions relative to the point that the odometry computer tracks around. + The X pod offset refers to how far sideways from the tracking point the + X (forward) odometry pod is. Left of the center is a positive number, + right of center is a negative number. The Y pod offset refers to how far forwards from + the tracking point the Y (strafe) odometry pod is. Forward of center is a positive number, + backwards is a negative number. + */ + odo.setOffsets(-84.0, -168.0); //these are tuned for 3110-0002-0001 Product Insight #1 + + /* + Set the kind of pods used by your robot. If you're using goBILDA odometry pods, select either + the goBILDA_SWINGARM_POD, or the goBILDA_4_BAR_POD. + If you're using another kind of odometry pod, uncomment setEncoderResolution and input the + number of ticks per mm of your odometry pod. + */ + odo.setEncoderResolution(GoBildaPinpointDriver.GoBildaOdometryPods.goBILDA_4_BAR_POD); + //odo.setEncoderResolution(13.26291192); + + + /* + Set the direction that each of the two odometry pods counts. The X (forward) pod should + increase when you move the robot forward, and the Y (strafe) pod should increase when + you move the robot to the left. + */ + odo.setEncoderDirections(GoBildaPinpointDriver.EncoderDirection.FORWARD, GoBildaPinpointDriver.EncoderDirection.FORWARD); + + + /* + Before running the robot, recalibrate the IMU. This needs to happen while the robot is stationary. + The IMU will automatically calibrate when first powered on, but recalibrating before running + the robot is a good idea to ensure that the calibration is "good". + resetPosAndIMU will reset the position to 0,0,0 and also recalibrate the IMU. + This is recommended before you run your autonomous, as a bad initial calibration can cause + an incorrect starting value for x, y, and heading. + */ + //odo.recalibrateIMU(); + odo.resetPosAndIMU(); + + telemetry.addData("Status", "Initialized"); + telemetry.addData("X offset", odo.getXOffset()); + telemetry.addData("Y offset", odo.getYOffset()); + telemetry.addData("Device Version Number:", odo.getDeviceVersion()); + telemetry.addData("Device Scalar", odo.getYawScalar()); + telemetry.update(); + + // Wait for the game to start (driver presses START) + waitForStart(); + resetRuntime(); + + + // run until the end of the match (driver presses STOP) + while (opModeIsActive()) { + + /* + Request an update from the Pinpoint odometry computer. This checks almost all outputs + from the device in a single I2C read. + */ + odo.update(); + + /* + Optionally, you can update only the heading of the device. This takes less time to read, but will not + pull any other data; only the heading is refreshed (which you can read with getHeading() or from getPosition()). + */ + //odo.update(GoBildaPinpointDriver.readData.ONLY_UPDATE_HEADING); + + + if (gamepad1.a){ + odo.resetPosAndIMU(); //resets the position to 0 and recalibrates the IMU + } + + if (gamepad1.b){ + odo.recalibrateIMU(); //recalibrates the IMU without resetting position + } + + /* + This code prints the loop frequency of the REV Control Hub. This frequency is affected + by I²C reads/writes, so it's good to keep an eye on. This code calculates the amount + of time each cycle takes and finds the frequency (number of updates per second) from + that cycle time.
+ */ + double newTime = getRuntime(); + double loopTime = newTime-oldTime; + double frequency = 1/loopTime; + oldTime = newTime; + + + /* + gets the current Position (x & y in mm, and heading in degrees) of the robot, and prints it. + */ + Pose2D pos = odo.getPosition(); + String data = String.format(Locale.US, "{X: %.3f, Y: %.3f, H: %.3f}", pos.getX(DistanceUnit.MM), pos.getY(DistanceUnit.MM), pos.getHeading(AngleUnit.DEGREES)); + telemetry.addData("Position", data); + + /* + gets the current Velocity (x & y in mm/sec and heading in degrees/sec) and prints it. + */ + Pose2D vel = odo.getVelocity(); + String velocity = String.format(Locale.US,"{XVel: %.3f, YVel: %.3f, HVel: %.3f}", vel.getX(DistanceUnit.MM), vel.getY(DistanceUnit.MM), vel.getHeading(AngleUnit.DEGREES)); + telemetry.addData("Velocity", velocity); + + + /* + Gets the Pinpoint device status. Pinpoint can reflect a few states. But we'll primarily see + READY: the device is working as normal + CALIBRATING: the device is calibrating and outputs are put on hold + NOT_READY: the device is resetting from scratch. This should only happen after a power-cycle + FAULT_NO_PODS_DETECTED - the device does not detect any pods plugged in + FAULT_X_POD_NOT_DETECTED - The device does not detect an X pod plugged in + FAULT_Y_POD_NOT_DETECTED - The device does not detect a Y pod plugged in + */ + telemetry.addData("Status", odo.getDeviceStatus()); + + telemetry.addData("Pinpoint Frequency", odo.getFrequency()); //prints/gets the current refresh rate of the Pinpoint + + telemetry.addData("REV Hub Frequency: ", frequency); //prints the control system refresh rate + telemetry.update(); + + } + }} \ No newline at end of file diff --git a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/pathGeneration/MathFunctions.java b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/pathGeneration/MathFunctions.java index 0edfbfd..bd4e393 100644 --- a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/pathGeneration/MathFunctions.java +++ b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/pathGeneration/MathFunctions.java @@ -27,16 +27,16 @@ public class MathFunctions { double denom = 1; // this multiplies up the numerator of the nCr function - for (int i = n; i > n-r; i--) { + for (int i = n; i > n - r; i--) { num *= i; } // this multiplies up the denominator of the nCr function - for (int i = 1; i <=r; i++) { + for (int i = 1; i <= r; i++) { denom *= i; } - return num/denom; + return num / denom; } /** @@ -67,7 +67,7 @@ public class MathFunctions { /** * This normalizes an angle to be between 0 and 2 pi radians, inclusive. - * + *

* IMPORTANT NOTE: This method operates in radians. * * @param angleRadians the angle to be normalized. @@ -75,8 +75,8 @@ public class MathFunctions { */ public static double normalizeAngle(double angleRadians) { double angle = angleRadians; - while (angle<0) angle += 2*Math.PI; - while (angle>2*Math.PI) angle -= 2*Math.PI; + while (angle < 0) angle += 2 * Math.PI; + while (angle > 2 * Math.PI) angle -= 2 * Math.PI; return angle; } @@ -88,7 +88,7 @@ public class MathFunctions { * @return returns the smallest angle. */ public static double getSmallestAngleDifference(double one, double two) { - return Math.min(MathFunctions.normalizeAngle(one-two), MathFunctions.normalizeAngle(two-one)); + return Math.min(MathFunctions.normalizeAngle(one - two), MathFunctions.normalizeAngle(two - one)); } /** @@ -98,7 +98,7 @@ public class MathFunctions { * @return returns the turn direction. */ public static double getTurnDirection(double startHeading, double endHeading) { - if (MathFunctions.normalizeAngle(endHeading-startHeading) >= 0 && MathFunctions.normalizeAngle(endHeading-startHeading) <= Math.PI) { + if (MathFunctions.normalizeAngle(endHeading - startHeading) >= 0 && MathFunctions.normalizeAngle(endHeading - startHeading) <= Math.PI) { return 1; // counter clock wise } return -1; // clock wise @@ -112,7 +112,7 @@ public class MathFunctions { * @return returns the distance between the two. */ public static double distance(Pose pose, Point point) { - return Math.sqrt(Math.pow(pose.getX()-point.getX(), 2) + Math.pow(pose.getY()-point.getY(), 2)); + return Math.sqrt(Math.pow(pose.getX() - point.getX(), 2) + Math.pow(pose.getY() - point.getY(), 2)); } /** @@ -123,7 +123,7 @@ public class MathFunctions { * @return returns the distance between the two. */ public static double distance(Pose one, Pose two) { - return Math.sqrt(Math.pow(one.getX()-two.getX(), 2) + Math.pow(one.getY()-two.getY(), 2)); + return Math.sqrt(Math.pow(one.getX() - two.getX(), 2) + Math.pow(one.getY() - two.getY(), 2)); } /** @@ -172,6 +172,22 @@ public class MathFunctions { return new Pose(one.getX() - two.getX(), one.getY() - two.getY(), one.getHeading() - two.getHeading()); } + /** + * This rotates the given pose by the given theta, + * + * @param pose the Pose to rotate. + * @param theta the angle to rotate by. + * @param rotateHeading whether to adjust the Pose heading too. + * @return the rotated Pose. + */ + public static Pose rotatePose(Pose pose, double theta, boolean rotateHeading) { + double x = pose.getX() * Math.cos(theta) - pose.getY() * Math.sin(theta); + double y = pose.getX() * Math.sin(theta) + pose.getY() * Math.cos(theta); + double heading = rotateHeading ? normalizeAngle(pose.getHeading() + theta) : pose.getHeading(); + + return new Pose(x, y, heading); + } + /** * This multiplies a Point by a scalar and returns the result as a Point * @@ -180,7 +196,7 @@ public class MathFunctions { * @return returns the scaled Point. */ public static Point scalarMultiplyPoint(Point point, double scalar) { - return new Point(point.getX()*scalar, point.getY()*scalar, Point.CARTESIAN); + return new Point(point.getX() * scalar, point.getY() * scalar, Point.CARTESIAN); } /** @@ -213,7 +229,7 @@ public class MathFunctions { * @return returns the scaled Vector. 
*/ public static Vector scalarMultiplyVector(Vector vector, double scalar) { - return new Vector(vector.getMagnitude()*scalar, vector.getTheta()); + return new Vector(vector.getMagnitude() * scalar, vector.getTheta()); } /** @@ -227,7 +243,7 @@ public class MathFunctions { if (vector.getMagnitude() == 0) { return new Vector(0.0, vector.getTheta()); } else { - return new Vector(vector.getMagnitude()/Math.abs(vector.getMagnitude()), vector.getTheta()); + return new Vector(vector.getMagnitude() / Math.abs(vector.getMagnitude()), vector.getTheta()); } } @@ -240,7 +256,7 @@ public class MathFunctions { */ public static Vector addVectors(Vector one, Vector two) { Vector returnVector = new Vector(); - returnVector.setOrthogonalComponents(one.getXComponent()+two.getXComponent(), one.getYComponent()+two.getYComponent()); + returnVector.setOrthogonalComponents(one.getXComponent() + two.getXComponent(), one.getYComponent() + two.getYComponent()); return returnVector; } @@ -254,7 +270,7 @@ public class MathFunctions { */ public static Vector subtractVectors(Vector one, Vector two) { Vector returnVector = new Vector(); - returnVector.setOrthogonalComponents(one.getXComponent()-two.getXComponent(), one.getYComponent()-two.getYComponent()); + returnVector.setOrthogonalComponents(one.getXComponent() - two.getXComponent(), one.getYComponent() - two.getYComponent()); return returnVector; } @@ -266,7 +282,7 @@ public class MathFunctions { * @return returns the dot product of the two Vectors. */ public static double dotProduct(Vector one, Vector two) { - return one.getXComponent()*two.getXComponent() + one.getYComponent()*two.getYComponent(); + return one.getXComponent() * two.getXComponent() + one.getYComponent() * two.getYComponent(); } /** @@ -278,7 +294,7 @@ public class MathFunctions { * @return returns the cross product of the two Vectors. */ public static double crossProduct(Vector one, Vector two) { - return one.getXComponent()*two.getYComponent() - one.getYComponent()*two.getXComponent(); + return one.getXComponent() * two.getYComponent() - one.getYComponent() * two.getXComponent(); } /** @@ -286,7 +302,7 @@ public class MathFunctions { * specified accuracy amount. * * @param one first number specified. - * @param two second number specified. + * @param two Second number specified. * @param accuracy the level of accuracy specified. * @return returns if the two numbers are within the specified accuracy of each other. */ diff --git a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/pathGeneration/PathBuilder.java b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/pathGeneration/PathBuilder.java index 5bfff7d..f044ce7 100644 --- a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/pathGeneration/PathBuilder.java +++ b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/pathGeneration/PathBuilder.java @@ -51,16 +51,47 @@ public class PathBuilder { return this; } + /** + * This adds a default Path defined by a specified BezierCurve to the PathBuilder. + * + * @param controlPoints This is the specified control points that define the BezierCurve. + * @return This returns itself with the updated data. + */ + public PathBuilder addBezierCurve(Point... controlPoints) { + return addPath(new BezierCurve(controlPoints)); + } + + /** + * This adds a default Path defined by a specified BezierCurve to the PathBuilder. + * + * @param controlPoints This is the specified control points that define the BezierCurve. 
+ * @return This returns itself with the updated data. + */ + public PathBuilder addBezierCurve(ArrayList controlPoints) { + return addPath(new BezierCurve(controlPoints)); + } + + /** + * This adds a default Path defined by a specified BezierLine to the PathBuilder. + * + * @param startPoint start point of the line. + * @param endPoint end point of the line. + * @return This returns itself with the updated data. + */ + public PathBuilder addBezierLine(Point startPoint, Point endPoint) { + return addPath(new BezierLine(startPoint, endPoint)); + } + /** * This sets a linear heading interpolation on the last Path added to the PathBuilder. * * @param startHeading The start of the linear heading interpolation. * @param endHeading The end of the linear heading interpolation. - * This will be reached at the end of the Path if no end time is specified. + * This will be reached at the end of the Path if no end time is specified. * @return This returns itself with the updated data. */ public PathBuilder setLinearHeadingInterpolation(double startHeading, double endHeading) { - this.paths.get(paths.size()-1).setLinearHeadingInterpolation(startHeading, endHeading); + this.paths.get(paths.size() - 1).setLinearHeadingInterpolation(startHeading, endHeading); return this; } @@ -69,13 +100,13 @@ public class PathBuilder { * * @param startHeading The start of the linear heading interpolation. * @param endHeading The end of the linear heading interpolation. - * This will be reached at the end of the Path if no end time is specified. + * This will be reached at the end of the Path if no end time is specified. * @param endTime The end time on the Path that the linear heading interpolation will end. - * This value goes from [0, 1] since Bezier curves are parametric functions. + * This value goes from [0, 1] since Bezier curves are parametric functions. * @return This returns itself with the updated data. */ public PathBuilder setLinearHeadingInterpolation(double startHeading, double endHeading, double endTime) { - this.paths.get(paths.size()-1).setLinearHeadingInterpolation(startHeading, endHeading, endTime); + this.paths.get(paths.size() - 1).setLinearHeadingInterpolation(startHeading, endHeading, endTime); return this; } @@ -86,7 +117,7 @@ public class PathBuilder { * @return This returns itself with the updated data. */ public PathBuilder setConstantHeadingInterpolation(double setHeading) { - this.paths.get(paths.size()-1).setConstantHeadingInterpolation(setHeading); + this.paths.get(paths.size() - 1).setConstantHeadingInterpolation(setHeading); return this; } @@ -94,20 +125,21 @@ public class PathBuilder { * This sets a reversed or tangent heading interpolation on the last Path added to the PathBuilder. * * @param set This sets the heading to reversed tangent following if this parameter is true. - * Conversely, this sets a tangent following if this parameter is false. + * Conversely, this sets a tangent following if this parameter is false. * @return This returns itself with the updated data. */ public PathBuilder setReversed(boolean set) { - this.paths.get(paths.size()-1).setReversed(set); + this.paths.get(paths.size() - 1).setReversed(set); return this; } + /** * This sets the heading interpolation to tangential on the last Path added to the PathBuilder. * There really shouldn't be a reason to use this since the default heading interpolation is * tangential but it's here. 
*/ public PathBuilder setTangentHeadingInterpolation() { - this.paths.get(paths.size()-1).setTangentHeadingInterpolation(); + this.paths.get(paths.size() - 1).setTangentHeadingInterpolation(); return this; } @@ -118,7 +150,7 @@ public class PathBuilder { * @return This returns itself with the updated data. */ public PathBuilder setZeroPowerAccelerationMultiplier(double set) { - this.paths.get(paths.size()-1).setZeroPowerAccelerationMultiplier(set); + this.paths.get(paths.size() - 1).setZeroPowerAccelerationMultiplier(set); return this; } @@ -129,7 +161,7 @@ public class PathBuilder { * @return This returns itself with the updated data. */ public PathBuilder setPathEndVelocityConstraint(double set) { - this.paths.get(paths.size()-1).setPathEndVelocityConstraint(set); + this.paths.get(paths.size() - 1).setPathEndVelocityConstraint(set); return this; } @@ -140,7 +172,7 @@ public class PathBuilder { * @return This returns itself with the updated data. */ public PathBuilder setPathEndTranslationalConstraint(double set) { - this.paths.get(paths.size()-1).setPathEndTranslationalConstraint(set); + this.paths.get(paths.size() - 1).setPathEndTranslationalConstraint(set); return this; } @@ -151,7 +183,7 @@ public class PathBuilder { * @return This returns itself with the updated data. */ public PathBuilder setPathEndHeadingConstraint(double set) { - this.paths.get(paths.size()-1).setPathEndHeadingConstraint(set); + this.paths.get(paths.size() - 1).setPathEndHeadingConstraint(set); return this; } @@ -162,7 +194,7 @@ public class PathBuilder { * @return This returns itself with the updated data. */ public PathBuilder setPathEndTValueConstraint(double set) { - this.paths.get(paths.size()-1).setPathEndTValueConstraint(set); + this.paths.get(paths.size() - 1).setPathEndTValueConstraint(set); return this; } @@ -173,7 +205,7 @@ public class PathBuilder { * @return This returns itself with the updated data. */ public PathBuilder setPathEndTimeoutConstraint(double set) { - this.paths.get(paths.size()-1).setPathEndTimeoutConstraint(set); + this.paths.get(paths.size() - 1).setPathEndTimeoutConstraint(set); return this; } @@ -182,12 +214,12 @@ public class PathBuilder { * This callback is set to run at a specified number of milliseconds after the start of the path. * * @param time This sets the number of milliseconds of wait between the start of the Path and - * the calling of the callback. + * the calling of the callback. * @param runnable This sets the code for the callback to run. Use lambda statements for this. * @return This returns itself with the updated data. */ public PathBuilder addTemporalCallback(double time, Runnable runnable) { - this.callbacks.add(new PathCallback(time, runnable, PathCallback.TIME, paths.size()-1)); + this.callbacks.add(new PathCallback(time, runnable, PathCallback.TIME, paths.size() - 1)); return this; } @@ -200,7 +232,7 @@ public class PathBuilder { * @return This returns itself with the updated data. 
*/ public PathBuilder addParametricCallback(double t, Runnable runnable) { - this.callbacks.add(new PathCallback(t, runnable, PathCallback.PARAMETRIC, paths.size()-1)); + this.callbacks.add(new PathCallback(t, runnable, PathCallback.PARAMETRIC, paths.size() - 1)); return this; } diff --git a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/pathGeneration/Point.java b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/pathGeneration/Point.java index 0da8df8..95a5f05 100644 --- a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/pathGeneration/Point.java +++ b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/pathGeneration/Point.java @@ -1,5 +1,7 @@ package org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration; +import androidx.annotation.NonNull; + import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose; /** @@ -31,13 +33,13 @@ public class Point { * This creates a new Point with coordinate inputs and a specified coordinate system. * * @param rOrX Depending on the coordinate system specified, this is either the r or x value. - * In polar coordinates, the r value is the distance from the origin. - * In Cartesian coordinates, the x value is the distance left/right from the origin. + * In polar coordinates, the r value is the distance from the origin. + * In Cartesian coordinates, the x value is the distance left/right from the origin. * @param thetaOrY Depending on the coordinate system specified, this is either the theta or - * y value. - * In polar coordinates, the theta value is the angle from the positive x-axis. - * Increasing theta moves in the counter-clockwise direction. - * In Cartesian coordinates, the y value is the distance up/down from the origin. + * y value. + * In polar coordinates, the theta value is the angle from the positive x-axis. + * Increasing theta moves in the counter-clockwise direction. + * In Cartesian coordinates, the y value is the distance up/down from the origin. * @param identifier this specifies what coordinate system the coordinate inputs are in. */ public Point(double rOrX, double thetaOrY, int identifier) { @@ -53,17 +55,27 @@ public class Point { setCoordinates(pose.getX(), pose.getY(), CARTESIAN); } + /** + * This creates a new Point from a X and Y value. + * + * @param setX the X value. + * @param setY the Y value. + */ + public Point(double setX, double setY) { + setCoordinates(setX, setY, CARTESIAN); + } + /** * This sets the coordinates of the Point using the specified coordinate system. * * @param rOrX Depending on the coordinate system specified, this is either the r or x value. - * In polar coordinates, the r value is the distance from the origin. - * In Cartesian coordinates, the x value is the distance left/right from the origin. + * In polar coordinates, the r value is the distance from the origin. + * In Cartesian coordinates, the x value is the distance left/right from the origin. * @param thetaOrY Depending on the coordinate system specified, this is either the theta or - * y value. - * In polar coordinates, the theta value is the angle from the positive x-axis. - * Increasing theta moves in the counter-clockwise direction. - * In Cartesian coordinates, the y value is the distance up/down from the origin. + * y value. + * In polar coordinates, the theta value is the angle from the positive x-axis. + * Increasing theta moves in the counter-clockwise direction. + * In Cartesian coordinates, the y value is the distance up/down from the origin. 
* @param identifier this specifies what coordinate system to use when setting values. */ public void setCoordinates(double rOrX, double thetaOrY, int identifier) { @@ -78,9 +90,9 @@ public class Point { theta = setOtherCoordinates[1]; break; default: - if (rOrX<0) { + if (rOrX < 0) { r = -rOrX; - theta = MathFunctions.normalizeAngle(thetaOrY+Math.PI); + theta = MathFunctions.normalizeAngle(thetaOrY + Math.PI); } else { r = rOrX; theta = MathFunctions.normalizeAngle(thetaOrY); @@ -99,7 +111,7 @@ public class Point { * @return returns the distance between the two Points. */ public double distanceFrom(Point otherPoint) { - return Math.sqrt(Math.pow(otherPoint.getX()-x, 2) + Math.pow(otherPoint.getY()-y, 2)); + return Math.sqrt(Math.pow(otherPoint.getX() - x, 2) + Math.pow(otherPoint.getY() - y, 2)); } /** @@ -110,7 +122,7 @@ public class Point { * @return this returns the x and y values, in that order, in an Array of doubles. */ public static double[] polarToCartesian(double r, double theta) { - return new double[] {r * Math.cos(theta), r * Math.sin(theta)}; + return new double[]{r * Math.cos(theta), r * Math.sin(theta)}; } /** @@ -123,17 +135,17 @@ public class Point { public static double[] cartesianToPolar(double x, double y) { if (x == 0) { if (y > 0) { - return new double[] {Math.abs(y), Math.PI/2}; + return new double[]{Math.abs(y), Math.PI / 2}; } else { - return new double[] {Math.abs(y), (3 * Math.PI) / 2}; + return new double[]{Math.abs(y), (3 * Math.PI) / 2}; } } - double r = Math.sqrt(x*x+y*y); - if (x < 0) return new double[] {r, Math.PI+Math.atan(y/x)}; + double r = Math.sqrt(x * x + y * y); + if (x < 0) return new double[]{r, Math.PI + Math.atan(y / x)}; if (y > 0) { return new double[]{r, Math.atan(y / x)}; } else { - return new double[]{r, (2*Math.PI) + Math.atan(y / x)}; + return new double[]{r, (2 * Math.PI) + Math.atan(y / x)}; } } @@ -182,4 +194,10 @@ public class Point { public Point copy() { return new Point(getX(), getY(), CARTESIAN); } + + @NonNull + @Override + public String toString() { + return "(" + getX() + ", " + getY() + ")"; + } } \ No newline at end of file diff --git a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/tuning/FollowerConstants.java b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/tuning/FollowerConstants.java index 59a64f7..038101c 100644 --- a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/tuning/FollowerConstants.java +++ b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/tuning/FollowerConstants.java @@ -30,6 +30,11 @@ public class FollowerConstants { public static String rightFrontMotorName = FRONT_RIGHT_MOTOR; public static String rightRearMotorName = BACK_RIGHT_MOTOR; + public static DcMotorSimple.Direction leftFrontMotorDirection = LEFT_FRONT_MOTOR_DIRECTION; + public static DcMotorSimple.Direction rightFrontMotorDirection = FRONT_RIGHT_MOTOR_DIRECTION; + public static DcMotorSimple.Direction leftRearMotorDirection = REAR_LEFT_MOTOR_DIRECTION; + public static DcMotorSimple.Direction rightRearMotorDirection = REAR_RIGHT_MOTOR_DIRECTION; + // This section is for setting the actual drive vector for the front left wheel, if the robot // is facing a heading of 0 radians with the wheel centered at (0,0) private static final double xMovement = ROBOT_SPEED_FORWARD; @@ -184,7 +189,7 @@ public class FollowerConstants { // the limit at which the heading PIDF switches between the main and secondary heading PIDFs - public static double headingPIDFSwitch = Math.PI/20; + 
public static double headingPIDFSwitch = Math.PI / 20; // Secondary heading error PIDF coefficients public static CustomPIDFCoefficients secondaryHeadingPIDFCoefficients = new CustomPIDFCoefficients( diff --git a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/tuning/ForwardVelocityTuner.java b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/tuning/ForwardVelocityTuner.java index 5f25325..cb64063 100644 --- a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/tuning/ForwardVelocityTuner.java +++ b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/tuning/ForwardVelocityTuner.java @@ -4,6 +4,10 @@ import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstan import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftRearMotorName; import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightFrontMotorName; import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightRearMotorName; +import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftFrontMotorDirection; +import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftRearMotorDirection; +import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightFrontMotorDirection; +import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightRearMotorDirection; import com.acmerobotics.dashboard.FtcDashboard; import com.acmerobotics.dashboard.config.Config; @@ -12,7 +16,6 @@ import com.qualcomm.robotcore.eventloop.opmode.Autonomous; import com.qualcomm.robotcore.eventloop.opmode.OpMode; import com.qualcomm.robotcore.hardware.DcMotor; import com.qualcomm.robotcore.hardware.DcMotorEx; -import com.qualcomm.robotcore.hardware.DcMotorSimple; import com.qualcomm.robotcore.hardware.configuration.typecontainers.MotorConfigurationType; import org.firstinspires.ftc.robotcore.external.Telemetry; @@ -40,7 +43,7 @@ import java.util.List; * @version 1.0, 3/13/2024 */ @Config -@Autonomous (name = "Forward Velocity Tuner", group = "Autonomous Pathing Tuning") +@Autonomous(name = "Forward Velocity Tuner", group = "Autonomous Pathing Tuning") public class ForwardVelocityTuner extends OpMode { private ArrayList velocities = new ArrayList<>(); @@ -52,7 +55,7 @@ public class ForwardVelocityTuner extends OpMode { private PoseUpdater poseUpdater; - public static double DISTANCE = 40; + public static double DISTANCE = 48; public static double RECORD_NUMBER = 10; private Telemetry telemetryA; @@ -71,10 +74,10 @@ public class ForwardVelocityTuner extends OpMode { leftRear = hardwareMap.get(DcMotorEx.class, leftRearMotorName); rightRear = hardwareMap.get(DcMotorEx.class, rightRearMotorName); rightFront = hardwareMap.get(DcMotorEx.class, rightFrontMotorName); - - // TODO: Make sure that this is the direction your motors need to be reversed in. 
- leftFront.setDirection(DcMotorSimple.Direction.REVERSE); - leftRear.setDirection(DcMotorSimple.Direction.REVERSE); + leftFront.setDirection(leftFrontMotorDirection); + leftRear.setDirection(leftRearMotorDirection); + rightFront.setDirection(rightFrontMotorDirection); + rightRear.setDirection(rightRearMotorDirection); motors = Arrays.asList(leftFront, leftRear, rightFront, rightRear); @@ -93,13 +96,11 @@ public class ForwardVelocityTuner extends OpMode { } telemetryA = new MultipleTelemetry(this.telemetry, FtcDashboard.getInstance().getTelemetry()); -// telemetryA.addLine("The robot will run at 1 power until it reaches " + DISTANCE + " inches forward."); -// telemetryA.addLine("Make sure you have enough room, since the robot has inertia after cutting power."); -// telemetryA.addLine("After running the distance, the robot will cut power from the drivetrain and display the forward velocity."); -// telemetryA.addLine("Press CROSS or A on game pad 1 to stop."); -// -// -// telemetryA.update(); + telemetryA.addLine("The robot will run at 1 power until it reaches " + DISTANCE + " inches forward."); + telemetryA.addLine("Make sure you have enough room, since the robot has inertia after cutting power."); + telemetryA.addLine("After running the distance, the robot will cut power from the drivetrain and display the forward velocity."); + telemetryA.addLine("Press CROSS or A on game pad 1 to stop."); + telemetryA.update(); } @@ -124,6 +125,10 @@ public class ForwardVelocityTuner extends OpMode { @Override public void loop() { if (gamepad1.cross || gamepad1.a) { + for (DcMotorEx motor : motors) { + motor.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE); + motor.setPower(0); + } requestOpModeStop(); } @@ -140,14 +145,14 @@ public class ForwardVelocityTuner extends OpMode { velocities.add(currentVelocity); velocities.remove(0); } - telemetryA.addData("x", poseUpdater.getPose().getX()); - telemetryA.addData("y", poseUpdater.getPose().getY()); - telemetryA.addData("heading", poseUpdater.getPose().getHeading()); - telemetryA.addData("velo mag", poseUpdater.getVelocity().getMagnitude()); - telemetryA.addData("velo ", poseUpdater.getVelocity().getTheta()); - telemetryA.update(); - } else { + leftFront.setPower(0); + leftRear.setPower(0); + rightFront.setPower(0); + rightRear.setPower(0); + for (DcMotorEx motor : motors) { + motor.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE); + } double average = 0; for (Double velocity : velocities) { average += velocity; @@ -155,12 +160,7 @@ public class ForwardVelocityTuner extends OpMode { average /= (double) velocities.size(); telemetryA.addData("forward velocity:", average); -// telemetryA.addData("x", poseUpdater.getPose().getX()); -// telemetryA.addData("y", poseUpdater.getPose().getY()); -// telemetryA.addData("heading", poseUpdater.getPose().getHeading()); -// telemetryA.addData("velo mag", poseUpdater.getVelocity().getMagnitude()); -// telemetryA.addData("velo ", poseUpdater.getVelocity().getTheta()); -// telemetryA.update(); + telemetryA.update(); } } } diff --git a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/tuning/ForwardZeroPowerAccelerationTuner.java b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/tuning/ForwardZeroPowerAccelerationTuner.java index 9454e13..e774504 100644 --- a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/tuning/ForwardZeroPowerAccelerationTuner.java +++ 
b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/tuning/ForwardZeroPowerAccelerationTuner.java @@ -4,6 +4,10 @@ import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstan import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftRearMotorName; import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightFrontMotorName; import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightRearMotorName; +import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftFrontMotorDirection; +import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftRearMotorDirection; +import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightFrontMotorDirection; +import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightRearMotorDirection; import com.acmerobotics.dashboard.FtcDashboard; import com.acmerobotics.dashboard.config.Config; @@ -12,7 +16,6 @@ import com.qualcomm.robotcore.eventloop.opmode.Autonomous; import com.qualcomm.robotcore.eventloop.opmode.OpMode; import com.qualcomm.robotcore.hardware.DcMotor; import com.qualcomm.robotcore.hardware.DcMotorEx; -import com.qualcomm.robotcore.hardware.DcMotorSimple; import com.qualcomm.robotcore.hardware.configuration.typecontainers.MotorConfigurationType; import org.firstinspires.ftc.robotcore.external.Telemetry; @@ -40,7 +43,7 @@ import java.util.List; * @version 1.0, 3/13/2024 */ @Config -@Autonomous (name = "Forward Zero Power Acceleration Tuner", group = "Autonomous Pathing Tuning") +@Autonomous(name = "Forward Zero Power Acceleration Tuner", group = "Autonomous Pathing Tuning") public class ForwardZeroPowerAccelerationTuner extends OpMode { private ArrayList accelerations = new ArrayList<>(); @@ -74,10 +77,10 @@ public class ForwardZeroPowerAccelerationTuner extends OpMode { leftRear = hardwareMap.get(DcMotorEx.class, leftRearMotorName); rightRear = hardwareMap.get(DcMotorEx.class, rightRearMotorName); rightFront = hardwareMap.get(DcMotorEx.class, rightFrontMotorName); - - // TODO: Make sure that this is the direction your motors need to be reversed in. 
- leftFront.setDirection(DcMotorSimple.Direction.REVERSE); - leftRear.setDirection(DcMotorSimple.Direction.REVERSE); + leftFront.setDirection(leftFrontMotorDirection); + leftRear.setDirection(leftRearMotorDirection); + rightFront.setDirection(rightFrontMotorDirection); + rightRear.setDirection(rightRearMotorDirection); motors = Arrays.asList(leftFront, leftRear, rightFront, rightRear); @@ -120,6 +123,10 @@ public class ForwardZeroPowerAccelerationTuner extends OpMode { @Override public void loop() { if (gamepad1.cross || gamepad1.a) { + for (DcMotorEx motor : motors) { + motor.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE); + motor.setPower(0); + } requestOpModeStop(); } @@ -149,7 +156,7 @@ public class ForwardZeroPowerAccelerationTuner extends OpMode { for (Double acceleration : accelerations) { average += acceleration; } - average /= (double)accelerations.size(); + average /= (double) accelerations.size(); telemetryA.addData("forward zero power acceleration (deceleration):", average); telemetryA.update(); diff --git a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/tuning/LateralZeroPowerAccelerationTuner.java b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/tuning/LateralZeroPowerAccelerationTuner.java index 1fab8ae..db4e903 100644 --- a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/tuning/LateralZeroPowerAccelerationTuner.java +++ b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/tuning/LateralZeroPowerAccelerationTuner.java @@ -4,6 +4,10 @@ import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstan import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftRearMotorName; import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightFrontMotorName; import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightRearMotorName; +import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftFrontMotorDirection; +import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftRearMotorDirection; +import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightFrontMotorDirection; +import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightRearMotorDirection; import com.acmerobotics.dashboard.FtcDashboard; import com.acmerobotics.dashboard.config.Config; @@ -12,7 +16,6 @@ import com.qualcomm.robotcore.eventloop.opmode.Autonomous; import com.qualcomm.robotcore.eventloop.opmode.OpMode; import com.qualcomm.robotcore.hardware.DcMotor; import com.qualcomm.robotcore.hardware.DcMotorEx; -import com.qualcomm.robotcore.hardware.DcMotorSimple; import com.qualcomm.robotcore.hardware.configuration.typecontainers.MotorConfigurationType; import org.firstinspires.ftc.robotcore.external.Telemetry; @@ -40,7 +43,7 @@ import java.util.List; * @version 1.0, 3/13/2024 */ @Config -@Autonomous (name = "Lateral Zero Power Acceleration Tuner", group = "Autonomous Pathing Tuning") +@Autonomous(name = "Lateral Zero Power Acceleration Tuner", group = "Autonomous Pathing Tuning") public class LateralZeroPowerAccelerationTuner extends OpMode { private ArrayList accelerations = new ArrayList<>(); @@ -52,7 +55,7 @@ public class LateralZeroPowerAccelerationTuner extends OpMode { private PoseUpdater poseUpdater; - public static double VELOCITY = 50; + public static double VELOCITY = 30; private double previousVelocity; @@ -74,10 +77,10 @@ 
public class LateralZeroPowerAccelerationTuner extends OpMode { leftRear = hardwareMap.get(DcMotorEx.class, leftRearMotorName); rightRear = hardwareMap.get(DcMotorEx.class, rightRearMotorName); rightFront = hardwareMap.get(DcMotorEx.class, rightFrontMotorName); - - // TODO: Make sure that this is the direction your motors need to be reversed in. - leftFront.setDirection(DcMotorSimple.Direction.REVERSE); - leftRear.setDirection(DcMotorSimple.Direction.REVERSE); + leftFront.setDirection(leftFrontMotorDirection); + leftRear.setDirection(leftRearMotorDirection); + rightFront.setDirection(rightFrontMotorDirection); + rightRear.setDirection(rightRearMotorDirection); motors = Arrays.asList(leftFront, leftRear, rightFront, rightRear); @@ -120,11 +123,15 @@ public class LateralZeroPowerAccelerationTuner extends OpMode { @Override public void loop() { if (gamepad1.cross || gamepad1.a) { + for (DcMotorEx motor : motors) { + motor.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE); + motor.setPower(0); + } requestOpModeStop(); } poseUpdater.update(); - Vector heading = new Vector(1.0, poseUpdater.getPose().getHeading() - Math.PI/2); + Vector heading = new Vector(1.0, poseUpdater.getPose().getHeading() - Math.PI / 2); if (!end) { if (!stopping) { if (MathFunctions.dotProduct(poseUpdater.getVelocity(), heading) > VELOCITY) { @@ -149,7 +156,7 @@ public class LateralZeroPowerAccelerationTuner extends OpMode { for (Double acceleration : accelerations) { average += acceleration; } - average /= (double)accelerations.size(); + average /= (double) accelerations.size(); telemetryA.addData("lateral zero power acceleration (deceleration):", average); telemetryA.update(); diff --git a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/tuning/StrafeVelocityTuner.java b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/tuning/StrafeVelocityTuner.java index acd90c0..8d51c08 100644 --- a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/tuning/StrafeVelocityTuner.java +++ b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/tuning/StrafeVelocityTuner.java @@ -4,6 +4,10 @@ import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstan import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftRearMotorName; import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightFrontMotorName; import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightRearMotorName; +import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftFrontMotorDirection; +import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftRearMotorDirection; +import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightFrontMotorDirection; +import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightRearMotorDirection; import com.acmerobotics.dashboard.FtcDashboard; import com.acmerobotics.dashboard.config.Config; @@ -12,7 +16,6 @@ import com.qualcomm.robotcore.eventloop.opmode.Autonomous; import com.qualcomm.robotcore.eventloop.opmode.OpMode; import com.qualcomm.robotcore.hardware.DcMotor; import com.qualcomm.robotcore.hardware.DcMotorEx; -import com.qualcomm.robotcore.hardware.DcMotorSimple; import com.qualcomm.robotcore.hardware.configuration.typecontainers.MotorConfigurationType; import org.firstinspires.ftc.robotcore.external.Telemetry; @@ -40,7 +43,7 @@ import java.util.List; * 
@version 1.0, 3/13/2024 */ @Config -@Autonomous (name = "Strafe Velocity Tuner", group = "Autonomous Pathing Tuning") +@Autonomous(name = "Strafe Velocity Tuner", group = "Autonomous Pathing Tuning") public class StrafeVelocityTuner extends OpMode { private ArrayList velocities = new ArrayList<>(); @@ -52,7 +55,7 @@ public class StrafeVelocityTuner extends OpMode { private PoseUpdater poseUpdater; - public static double DISTANCE = 40; + public static double DISTANCE = 48; public static double RECORD_NUMBER = 10; private Telemetry telemetryA; @@ -71,10 +74,10 @@ public class StrafeVelocityTuner extends OpMode { leftRear = hardwareMap.get(DcMotorEx.class, leftRearMotorName); rightRear = hardwareMap.get(DcMotorEx.class, rightRearMotorName); rightFront = hardwareMap.get(DcMotorEx.class, rightFrontMotorName); - - // TODO: Make sure that this is the direction your motors need to be reversed in. - leftFront.setDirection(DcMotorSimple.Direction.REVERSE); - leftRear.setDirection(DcMotorSimple.Direction.REVERSE); + leftFront.setDirection(leftFrontMotorDirection); + leftRear.setDirection(leftRearMotorDirection); + rightFront.setDirection(rightFrontMotorDirection); + rightRear.setDirection(rightRearMotorDirection); motors = Arrays.asList(leftFront, leftRear, rightFront, rightRear); @@ -120,6 +123,10 @@ public class StrafeVelocityTuner extends OpMode { @Override public void loop() { if (gamepad1.cross || gamepad1.a) { + for (DcMotorEx motor : motors) { + motor.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE); + motor.setPower(0); + } requestOpModeStop(); } @@ -132,11 +139,18 @@ public class StrafeVelocityTuner extends OpMode { motor.setPower(0); } } else { - double currentVelocity = Math.abs(MathFunctions.dotProduct(poseUpdater.getVelocity(), new Vector(1, Math.PI/2))); + double currentVelocity = Math.abs(MathFunctions.dotProduct(poseUpdater.getVelocity(), new Vector(1, Math.PI / 2))); velocities.add(currentVelocity); velocities.remove(0); } } else { + leftFront.setPower(0); + leftRear.setPower(0); + rightFront.setPower(0); + rightRear.setPower(0); + for (DcMotorEx motor : motors) { + motor.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE); + } double average = 0; for (Double velocity : velocities) { average += velocity; diff --git a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/util/Drawing.java b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/util/Drawing.java index 9a2d956..9af9662 100644 --- a/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/util/Drawing.java +++ b/TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/util/Drawing.java @@ -20,11 +20,14 @@ import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector; * @version 1.0, 4/22/2024 */ public class Drawing { + public static final double ROBOT_RADIUS = 9; + private static TelemetryPacket packet; /** * This draws everything that will be used in the Follower's telemetryDebug() method. This takes * a Follower as an input, so an instance of the DashbaordDrawingHandler class is not needed. 
+ * * @param follower */ public static void drawDebug(Follower follower) { @@ -35,6 +38,7 @@ public class Drawing { } drawPoseHistory(follower.getDashboardPoseTracker(), "#4CAF50"); drawRobot(follower.getPose(), "#4CAF50"); + sendPacket(); } @@ -114,12 +118,10 @@ public class Drawing { * @param t the Point to draw at */ public static void drawRobotOnCanvas(Canvas c, Point t) { - final double ROBOT_RADIUS = 9; - c.setStrokeWidth(1); c.strokeCircle(t.getX(), t.getY(), ROBOT_RADIUS); - Vector halfv = new Vector(0.5*ROBOT_RADIUS, t.getTheta()); + Vector halfv = new Vector(0.5 * ROBOT_RADIUS, t.getTheta()); Vector p1 = MathFunctions.addVectors(halfv, new Vector(t.getR(), t.getTheta())); Vector p2 = MathFunctions.addVectors(p1, halfv); c.strokeLine(p1.getXComponent(), p1.getYComponent(), p2.getXComponent(), p2.getYComponent()); @@ -133,8 +135,6 @@ public class Drawing { * @param t the Pose to draw at */ public static void drawRobotOnCanvas(Canvas c, Pose t) { - final double ROBOT_RADIUS = 9; - c.strokeCircle(t.getX(), t.getY(), ROBOT_RADIUS); Vector v = t.getHeadingVector(); v.setMagnitude(v.getMagnitude() * ROBOT_RADIUS); diff --git a/build.common.gradle b/build.common.gradle index 12e6acb..72ca9d9 100644 --- a/build.common.gradle +++ b/build.common.gradle @@ -21,7 +21,7 @@ apply plugin: 'com.android.application' android { - compileSdkVersion 29 + compileSdkVersion 30 signingConfigs { release { @@ -113,10 +113,6 @@ android { packagingOptions { pickFirst '**/*.so' } - sourceSets.main { - jni.srcDirs = [] - jniLibs.srcDir rootProject.file('libs') - } ndkVersion '21.3.6528147' } diff --git a/build.dependencies.gradle b/build.dependencies.gradle index 08b6a3b..e890dc6 100644 --- a/build.dependencies.gradle +++ b/build.dependencies.gradle @@ -6,14 +6,14 @@ repositories { } dependencies { - implementation 'org.firstinspires.ftc:Inspection:10.1.0' - implementation 'org.firstinspires.ftc:Blocks:10.1.0' - implementation 'org.firstinspires.ftc:RobotCore:10.1.0' - implementation 'org.firstinspires.ftc:RobotServer:10.1.0' - implementation 'org.firstinspires.ftc:OnBotJava:10.1.0' - implementation 'org.firstinspires.ftc:Hardware:10.1.0' - implementation 'org.firstinspires.ftc:FtcCommon:10.1.0' - implementation 'org.firstinspires.ftc:Vision:10.1.0' + implementation 'org.firstinspires.ftc:Inspection:10.1.1' + implementation 'org.firstinspires.ftc:Blocks:10.1.1' + implementation 'org.firstinspires.ftc:RobotCore:10.1.1' + implementation 'org.firstinspires.ftc:RobotServer:10.1.1' + implementation 'org.firstinspires.ftc:OnBotJava:10.1.1' + implementation 'org.firstinspires.ftc:Hardware:10.1.1' + implementation 'org.firstinspires.ftc:FtcCommon:10.1.1' + implementation 'org.firstinspires.ftc:Vision:10.1.1' implementation 'androidx.appcompat:appcompat:1.2.0' implementation "com.acmerobotics.roadrunner:ftc:0.1.14" diff --git a/build.gradle b/build.gradle index 8969a41..e70f209 100644 --- a/build.gradle +++ b/build.gradle @@ -11,7 +11,7 @@ buildscript { } dependencies { // Note for FTC Teams: Do not modify this yourself. 
- classpath 'com.android.tools.build:gradle:7.2.0' + classpath 'com.android.tools.build:gradle:8.7.0' } } diff --git a/gradle.properties b/gradle.properties index 7a370c5..f5935e9 100644 --- a/gradle.properties +++ b/gradle.properties @@ -8,3 +8,5 @@ android.enableJetifier=false # Allow Gradle to use up to 1 GB of RAM org.gradle.jvmargs=-Xmx1024M + +android.nonTransitiveRClass=false \ No newline at end of file diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index aa991fc..19cfad9 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,5 +1,5 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-7.4.2-bin.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-8.9-bin.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists diff --git a/gradlew b/gradlew old mode 100755 new mode 100644
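
The diff above adds MathFunctions.rotatePose(Pose, double, boolean), a standard 2D rotation about the origin with an optional heading adjustment. The sketch below is a quick illustrative check of what it returns; it is not part of the diff, the class name and sample values are made up, and running its main method assumes the pedroPathing classes from this repository are on the classpath.

import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.MathFunctions;

public class RotatePoseCheck {
    public static void main(String[] args) {
        // A pose 10 units out along +X with heading 0, rotated 90 degrees counterclockwise.
        Pose rotated = MathFunctions.rotatePose(new Pose(10, 0, 0), Math.PI / 2, true);
        // Standard 2D rotation: expect approximately x = 0, y = 10, heading = pi/2.
        System.out.printf("x=%.3f y=%.3f heading=%.3f%n",
                rotated.getX(), rotated.getY(), rotated.getHeading());
    }
}

Rotating (10, 0) by pi/2 counterclockwise lands on roughly (0, 10), and because rotateHeading is true the heading is advanced by the same angle and normalized into [0, 2*pi) via normalizeAngle.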
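
PathBuilder gains addBezierLine(Point, Point) and two addBezierCurve overloads, so segments can be declared without constructing BezierLine or BezierCurve objects by hand. Below is a minimal sketch of chaining them with the heading-interpolation and callback setters that appear in this diff; the helper method name and the coordinates are illustrative assumptions, not team code.

import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathBuilder;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Point;

public class PathBuilderSketch {
    // Hypothetical helper: chains the new convenience methods onto an existing PathBuilder.
    static PathBuilder addScorePaths(PathBuilder builder) {
        return builder
                .addBezierLine(new Point(0, 0), new Point(24, 0))                       // straight segment
                .setLinearHeadingInterpolation(0, Math.PI / 2)                          // turn 90 degrees along it
                .addBezierCurve(new Point(24, 0), new Point(36, 0), new Point(36, 24))  // curved segment
                .setConstantHeadingInterpolation(Math.PI / 2)                           // hold heading on the curve
                .addParametricCallback(0.5, () -> {
                    // fires halfway along the most recent path, e.g. raise a lift here
                });
    }
}

Each add* call appends a Path and each set*/addParametricCallback call configures the most recently added Path (paths.size() - 1), so the order of the chained calls matters.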
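
Point's static cartesianToPolar and polarToCartesian helpers, together with the new two-argument Cartesian constructor and toString(), are easy to sanity-check in isolation. This standalone sketch is illustrative only: the class name is hypothetical, and compiling it outside the robot app assumes the teamcode classes (and their androidx annotation dependency) are on the classpath.

import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Point;

public class PointConversionCheck {
    public static void main(String[] args) {
        // (-1, -1) maps to r = sqrt(2), theta = 5*pi/4, with theta kept in [0, 2*pi).
        double[] polar = Point.cartesianToPolar(-1.0, -1.0);
        // polarToCartesian inverts the conversion, recovering approximately (-1, -1).
        double[] xy = Point.polarToCartesian(polar[0], polar[1]);
        System.out.printf("r=%.4f theta=%.4f -> x=%.4f y=%.4f%n", polar[0], polar[1], xy[0], xy[1]);

        // The new two-argument constructor and toString() print the Cartesian coordinates.
        System.out.println(new Point(-1.0, -1.0)); // approximately (-1.0, -1.0)
    }
}

The round trip agrees up to floating-point error, which is why the expected values above are stated as approximate.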
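
The tuning OpModes above now read both motor names and motor directions from FollowerConstants (leftFrontMotorDirection and friends) instead of hard-coding DcMotorSimple.Direction.REVERSE, so one configuration drives every tuner. A minimal sketch of an OpMode that applies the same configuration for a quick hardware check follows; the class name, the TeleOp label, and the choice to check only one motor are illustrative assumptions.

import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftFrontMotorDirection;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftFrontMotorName;

import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.DcMotorEx;

@TeleOp(name = "Motor Direction Check")
public class MotorDirectionCheck extends OpMode {
    private DcMotorEx leftFront;

    @Override
    public void init() {
        // Pull both the configured name and direction from FollowerConstants,
        // exactly as the tuning OpModes in this diff now do.
        leftFront = hardwareMap.get(DcMotorEx.class, leftFrontMotorName);
        leftFront.setDirection(leftFrontMotorDirection);
    }

    @Override
    public void loop() {
        telemetry.addData("leftFront direction", leftFront.getDirection());
        telemetry.update();
    }
}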