Merge remote-tracking branch 'pedro-qs/master' into branch-silver-14493-v2

# Conflicts:
#	FtcRobotController/src/main/AndroidManifest.xml
#	FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptAprilTagLocalization.java
#	README.md
#	TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/LOCALIZATION.md
#	TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/OVERVIEW.md
#	TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/TUNING.md
#	TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/follower/DriveVectorScaler.java
#	TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/follower/Follower.java
#	TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/Localizer.java
#	TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/Pose.java
#	TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/PoseUpdater.java
#	TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/localizers/ThreeWheelLocalizer.java
#	TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/tuning/LateralTuner.java
#	TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/tuning/LocalizationTest.java
#	TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/pathGeneration/MathFunctions.java
#	TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/pathGeneration/PathBuilder.java
#	TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/pathGeneration/Point.java
#	TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/tuning/FollowerConstants.java
#	TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/tuning/ForwardVelocityTuner.java
#	TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/tuning/ForwardZeroPowerAccelerationTuner.java
#	TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/tuning/LateralZeroPowerAccelerationTuner.java
#	TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/tuning/StrafeVelocityTuner.java
#	TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/util/Drawing.java
#	build.dependencies.gradle
This commit is contained in:
2024-12-26 18:31:46 -08:00
45 changed files with 3724 additions and 1790 deletions

43
.github/workflows/static.yml vendored Normal file
View File

@ -0,0 +1,43 @@
# Simple workflow for deploying static content to GitHub Pages
name: Deploy static content to Pages

on:
  # Runs on pushes targeting the default branch
  push:
    branches: ["master"]
  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:

# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
permissions:
  contents: read
  pages: write
  id-token: write

# Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued.
# However, do NOT cancel in-progress runs as we want to allow these production deployments to complete.
concurrency:
  group: "pages"
  cancel-in-progress: false

jobs:
  # Single deploy job since we're just deploying
  deploy:
    environment:
      name: github-pages
      url: ${{ steps.deployment.outputs.page_url }}
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Setup Pages
        uses: actions/configure-pages@v5
      - name: Upload artifact
        uses: actions/upload-pages-artifact@v3
        with:
          # Upload entire repository
          path: '.'
      - name: Deploy to GitHub Pages
        id: deployment
        uses: actions/deploy-pages@v4

View File

@ -14,7 +14,11 @@ android {
buildConfigField "String", "APP_BUILD_TIME", '"' + (new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ", Locale.ROOT).format(new Date())) + '"'
}
compileSdkVersion 29
buildFeatures {
buildConfig = true
}
compileSdkVersion 30
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8

View File

@ -1,8 +1,8 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:versionCode="55"
android:versionName="10.0">
android:versionCode="57"
android:versionName="10.1.1">
<uses-permission android:name="android.permission.RECEIVE_BOOT_COMPLETED" />

View File

@ -58,6 +58,9 @@ import java.util.List;
* When an AprilTag in the TagLibrary is detected, the SDK provides location and orientation of the robot, relative to the field origin.
* This information is provided in the "robotPose" member of the returned "detection".
*
* To learn about the Field Coordinate System that is defined for FTC (and used by this OpMode), see the FTC-DOCS link below:
* https://ftc-docs.firstinspires.org/en/latest/game_specific_resources/field_coordinate_system/field-coordinate-system.html
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
*/

View File

@ -0,0 +1,191 @@
/*
* Copyright (c) 2024 Phil Malone
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package org.firstinspires.ftc.robotcontroller.external.samples;
import android.util.Size;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.util.SortOrder;
import org.firstinspires.ftc.robotcore.external.Telemetry;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.vision.VisionPortal;
import org.firstinspires.ftc.vision.opencv.ColorBlobLocatorProcessor;
import org.firstinspires.ftc.vision.opencv.ColorRange;
import org.firstinspires.ftc.vision.opencv.ImageRegion;
import org.opencv.core.RotatedRect;
import java.util.List;
/*
* This OpMode illustrates how to use a video source (camera) to locate specifically colored regions
*
* Unlike a "color sensor" which determines the color of an object in the field of view, this "color locator"
* will search the Region Of Interest (ROI) in a camera image, and find any "blobs" of color that match the requested color range.
* These blobs can be further filtered and sorted to find the one most likely to be the item the user is looking for.
*
* To perform this function, a VisionPortal runs a ColorBlobLocatorProcessor process.
* The ColorBlobLocatorProcessor process is created first, and then the VisionPortal is built to use this process.
* The ColorBlobLocatorProcessor analyses the ROI and locates pixels that match the ColorRange to form a "mask".
* The matching pixels are then collected into contiguous "blobs" of pixels. The outer boundaries of these blobs are called its "contour".
* For each blob, the process then creates the smallest possible rectangle "boxFit" that will fully encase the contour.
* The user can then call getBlobs() to retrieve the list of Blobs, where each Blob contains the contour and the boxFit data.
* Note: The default sort order for Blobs is ContourArea, in descending order, so the biggest contours are listed first.
*
* To aid the user, a colored boxFit rectangle is drawn on the camera preview to show the location of each Blob
* The original Blob contour can also be added to the preview. This is helpful when configuring the ColorBlobLocatorProcessor parameters.
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list
*/
@Disabled
@TeleOp(name = "Concept: Vision Color-Locator", group = "Concept")
public class ConceptVisionColorLocator extends LinearOpMode
{
    @Override
    public void runOpMode()
    {
        /*
         * Build a "Color Locator" vision processor from the ColorBlobLocatorProcessor class.
         *
         * Configuration summary (same options as the SDK sample):
         * - setTargetColorRange(...) selects the color being searched for. Predefined choices
         *   are RED, BLUE, YELLOW and GREEN, or define your own with
         *   new ColorRange(ColorSpace.YCrCb, lowScalar, highScalar).
         * - setRoi(...) focuses the search on a Region Of Interest. Use one of:
         *   ImageRegion.entireFrame(), ImageRegion.asImageCoordinates(...) for pixel
         *   coordinates, or ImageRegion.asUnityCenterCoordinates(...) for a normalized
         *   +/- 1.0 coordinate system centered on the screen.
         * - setContourMode(...) chooses ALL_FLATTENED_HIERARCHY (return every contour) or
         *   EXTERNAL_ONLY (skip contours fully inside another contour; this helps keep
         *   bright reflection spots from breaking up areas of solid color).
         * - setDrawContours(true) overlays contours on the Stream Preview. Useful while
         *   tuning parameters, but it costs CPU time.
         * - Optional pre-processing steps, each taking an NxN kernel size in pixels:
         *   setBlurSize(int) smooths color transitions for cleaner contours (even sizes
         *   are bumped to the next odd value; about 5 suits a 320x240 image, and too much
         *   blur hides small features); setErodeSize(int) strips floating pixels and thin
         *   lines (2-4 for low-res images, may shrink objects or grow holes);
         *   setDilateSize(int) fills small holes and joins broken parts of an object
         *   (2-4 for low-res images).
         */
        ColorBlobLocatorProcessor blobLocator = new ColorBlobLocatorProcessor.Builder()
                .setTargetColorRange(ColorRange.BLUE)                                // predefined color match
                .setContourMode(ColorBlobLocatorProcessor.ContourMode.EXTERNAL_ONLY) // skip blobs inside blobs
                .setRoi(ImageRegion.asUnityCenterCoordinates(-0.5, 0.5, 0.5, -0.5))  // central 1/4 of camera view
                .setDrawContours(true)                                               // show contours on the Stream Preview
                .setBlurSize(5)                                                      // smooth color transitions
                .build();

        /*
         * Build a VisionPortal that runs the locator process.
         * A modest resolution is plenty for blob location and keeps latency down.
         * The source may be a webcam (hardwareMap WebcamName, as here) or a phone
         * camera via .setCamera(BuiltinCameraDirection.BACK).
         */
        VisionPortal visionPortal = new VisionPortal.Builder()
                .addProcessor(blobLocator)
                .setCameraResolution(new Size(320, 240))
                .setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"))
                .build();

        telemetry.setMsTransmissionInterval(50);   // faster telemetry updates; debugging aid only
        telemetry.setDisplayFormat(Telemetry.DisplayFormat.MONOSPACE);

        // WARNING: loop runs during INIT too, so the stream preview can be viewed on the Driver Station.
        while (opModeIsActive() || opModeInInit())
        {
            telemetry.addData("preview on/off", "... Camera Stream\n");

            // Fetch the current blob list (default sort: contour area, descending).
            List<ColorBlobLocatorProcessor.Blob> blobs = blobLocator.getBlobs();

            /*
             * The list may be filtered; contours stay on the Stream Preview, only the
             * list itself shrinks. Multiple filters may be combined:
             *   ColorBlobLocatorProcessor.Util.filterByArea(min, max, blobs)
             *       area = pixel count inside the contour; start wide, then narrow to
             *       the likely on-screen size of the target object.
             *   ColorBlobLocatorProcessor.Util.filterByDensity(min, max, blobs)
             *       density = contour area / convex-hull ("rubber band") area.
             *   ColorBlobLocatorProcessor.Util.filterByAspectRatio(min, max, blobs)
             *       aspect = boxFit long side / short side; a perfect square is 1,
             *       everything else is > 1.
             * Sorting (at most one call, ascending or descending):
             *   sortByArea / sortByDensity / sortByAspectRatio with a SortOrder.
             */
            ColorBlobLocatorProcessor.Util.filterByArea(50, 20000, blobs);  // drop very small blobs

            telemetry.addLine(" Area Density Aspect Center");

            // Report area, density, aspect ratio, and boxFit center for each remaining blob.
            for (ColorBlobLocatorProcessor.Blob blob : blobs)
            {
                RotatedRect box = blob.getBoxFit();
                telemetry.addLine(String.format("%5d %4.2f %5.2f (%3d,%3d)",
                        blob.getContourArea(), blob.getDensity(), blob.getAspectRatio(),
                        (int) box.center.x, (int) box.center.y));
            }

            telemetry.update();
            sleep(50);
        }
    }
}

View File

@ -0,0 +1,136 @@
/*
* Copyright (c) 2024 Phil Malone
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package org.firstinspires.ftc.robotcontroller.external.samples;
import android.graphics.Color;
import android.util.Size;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.vision.VisionPortal;
import org.firstinspires.ftc.vision.opencv.ImageRegion;
import org.firstinspires.ftc.vision.opencv.PredominantColorProcessor;
/*
* This OpMode illustrates how to use a video source (camera) as a color sensor
*
* A "color sensor" will typically determine the color of the object that it is pointed at.
*
* This sample performs the same function, except it uses a video camera to inspect an object or scene.
* The user may choose to inspect all, or just a Region of Interest (ROI), of the active camera view.
* The user must also provide a list of "acceptable colors" (Swatches) from which the closest matching color will be selected.
*
* To perform this function, a VisionPortal runs a PredominantColorProcessor process.
* The PredominantColorProcessor process is created first, and then the VisionPortal is built to use this process.
* The PredominantColorProcessor analyses the ROI and splits the colored pixels into several color-clusters.
* The largest of these clusters is then considered to be the "Predominant Color"
* The process then matches the Predominant Color with the closest Swatch and returns that match.
*
* To aid the user, a colored rectangle is drawn on the camera preview to show the RegionOfInterest,
* The Predominant Color is used to paint the rectangle border, so the user can verify that the color is reasonable.
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list
*/
@Disabled
@TeleOp(name = "Concept: Vision Color-Sensor", group = "Concept")
public class ConceptVisionColorSensor extends LinearOpMode
{
    @Override
    public void runOpMode()
    {
        /*
         * Build a "Color Sensor" vision processor from the PredominantColorProcessor class.
         *
         * Configuration summary (same options as the SDK sample):
         * - setRoi(...) selects the Region Of Interest to inspect: the entire frame,
         *   pixel coordinates via ImageRegion.asImageCoordinates(...), or a normalized
         *   +/- 1.0 region via ImageRegion.asUnityCenterCoordinates(...) — here a 10%
         *   width/height square centered on screen.
         * - setSwatches(...) lists the only acceptable color matches; anything seen is
         *   forced to the closest listed Swatch (e.g. green may report as YELLOW when
         *   GREEN is not listed). Available Swatches: RED, ORANGE, YELLOW, GREEN, CYAN,
         *   BLUE, PURPLE, MAGENTA, BLACK, WHITE.
         */
        PredominantColorProcessor sensorProcessor = new PredominantColorProcessor.Builder()
                .setRoi(ImageRegion.asUnityCenterCoordinates(-0.1, 0.1, 0.1, -0.1))
                .setSwatches(
                        PredominantColorProcessor.Swatch.RED,
                        PredominantColorProcessor.Swatch.BLUE,
                        PredominantColorProcessor.Swatch.YELLOW,
                        PredominantColorProcessor.Swatch.BLACK,
                        PredominantColorProcessor.Swatch.WHITE)
                .build();

        /*
         * Build a VisionPortal that runs the color-sensor process.
         * A low resolution is sufficient and reduces latency. The source may be a
         * webcam (hardwareMap WebcamName, as here) or a phone camera via
         * .setCamera(BuiltinCameraDirection.BACK).
         */
        VisionPortal visionPortal = new VisionPortal.Builder()
                .addProcessor(sensorProcessor)
                .setCameraResolution(new Size(320, 240))
                .setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"))
                .build();

        telemetry.setMsTransmissionInterval(50);   // faster telemetry updates; debugging aid only

        // WARNING: loop runs during INIT too, so the stream preview can be viewed on the Driver Station.
        while (opModeIsActive() || opModeInInit())
        {
            telemetry.addData("DS preview on/off", "3 dots, Camera Stream\n");

            // Request the most recent analysis: the closest matching Swatch plus the
            // predominant RGB color. To act on the color, compare the Swatch, e.g.:
            //   if (analysis.closestSwatch == PredominantColorProcessor.Swatch.RED) { ... }
            PredominantColorProcessor.Result analysis = sensorProcessor.getAnalysis();

            // Show the match and its RGB components.
            telemetry.addData("Best Match:", analysis.closestSwatch);
            telemetry.addLine(String.format("R %3d, G %3d, B %3d", Color.red(analysis.rgb), Color.green(analysis.rgb), Color.blue(analysis.rgb)));

            telemetry.update();
            sleep(20);
        }
    }
}

1554
README.md

File diff suppressed because it is too large Load Diff

View File

@ -4,10 +4,10 @@ the pose exponential method of localization. It's basically a way of turning mov
robot's coordinate frame to the global coordinate frame. If you're interested in reading more about
it, then check out pages 177 - 183 of [Controls Engineering in the FIRST Robotics Competition](https://file.tavsys.net/control/controls-engineering-in-frc.pdf)
by Tyler Veness. However, the OTOS localizer uses its own onboard system for calculating localization,
which I do not know about.
which we do not know about.
## Setting Your Localizer
Go to line `70` in the `PoseUpdater` class, and replace the `new ThreeWheelLocalizer(hardwareMap)`
Go to line `73` in the `PoseUpdater` class, and replace the `new ThreeWheelLocalizer(hardwareMap)`
with the localizer that applies to you:
* If you're using drive encoders, put `new DriveEncoderLocalizer(hardwareMap)`
* If you're using two wheel odometry, put `new TwoWheelLocalizer(hardwareMap)`
@ -15,6 +15,7 @@ with the localizer that applies to you:
don't change it from the default
* If you're using three wheel odometry with the IMU, put `new ThreeWheelIMULocalizer(hardwareMap)`
* If you're using OTOS, put `new OTOSLocalizer(hardwareMap)`
* If you're using Pinpoint, put `new PinpointLocalizer(hardwareMap)`
## Tuning
To start, you'll want to select your localizer of choice. Below, I'll have instructions for the drive
@ -241,6 +242,23 @@ that applies to you and follow the directions there.
robot around and see if the movements look accurate on FTC Dashboard. If they don't, then you'll
want to re-run some of the previous steps. Otherwise, congrats on tuning your localizer!
## Pinpoint Localizer
* First you will need to plug in the pinpoint to the i2c port. Ensure that the dead wheel encoder wires are
plugged into the proper ports on the pinpoint to ensure no error within the tuning steps.
* Then, go to the `PinpointLocalizer.java` file and go to where it tells you to replace
the current statement with your pinpoint port in the constructor. Replace the `deviceName` parameter
with the name of the port that the pinpoint is connected to.
* Next, follow the instructions left by the TODO: comment and enter the odometry measurements in either
millimeters or inches (we have the conversion rates listed).
* First, to ensure that your pinpoint is properly connected, please run the `SensorGoBildaPinpointExample.java`
file left in the `tuning` folder located within `localization`.
* Once completed, the localizer should be properly tuned. To test it out, you can go to
`Localization Test` and push around or drive around your robot. Go to [FTC Dashboard](http://192.168.43.1:8080/dash)
and on the top right, switch the drop down from the default view to the field view. Then, on the bottom
left corner, you should see a field and the robot being drawn on the field. You can then move your
robot around and see if the movements look accurate on FTC Dashboard. If they don't, then you'll
want to re-run some of the previous steps. Otherwise, congrats on tuning your localizer!
## Using Road Runner's Localizer
Of course, many teams have experience using Road Runner in the past and so have localizers from Road
Runner that are tuned. There is an adapter for the Road Runner three wheel localizer to the Pedro

View File

@ -19,7 +19,7 @@ Why use Pedro Pathing? Why not something else like Road Runner or Pure Pursuit?
* Pure Pursuit searches for the farthest point on the path that's within a certain radius from the robot. Pure Pursuit will then go in a straight line to that point. This poses several problems, as a small search radius will cause some oscillations on corners, and a large search radius will cut corners on paths, which makes the paths inaccurate to real life.
* Pedro Pathing instead corrects to the closest point on the path while still following the path. This ensures that the follower will stay on the path while still being able to move forward along the path without cutting corners or encountering oscillation issues.
* Why not Road Runner?
* Road Runner is a motion profile based follower, which means that a set of instructions for motor powers are calculated for each path beforehand and then run. After reaching the end of this motion profile, Road Runner corrects. This can be sufficient for most situations, but if the robot encounters an obstacle or wheel slippage, it may be unable to correct in time.
* Road Runner is a motion profile based follower, which means that a set of instructions for motor powers are calculated for each path beforehand and then run. During this motion profile, Road Runner can struggle to correct. This can be sufficient for many situations, but if the robot encounters an obstacle or wheel slippage, it may be unable to correct in time.
* Pedro Pathing instead dynamically corrects throughout the path. The movement vectors are calculated at every point along the path, and because of this, the path can even be changed midway through and Pedro Pathing will still be able to correct. Since correction occurs throughout the path, the error correction isn't concentrated on the end of the path and therefore the robot is able to better minimize error.
## How Does Pedro Path?
@ -105,4 +105,5 @@ of the curvature formula, we can estimate a centripetal force correction and app
control.
## Questions?
If you still have more questions, feel free to contact us at `scottsbots10158@gmail.com`
If you still have more questions, feel free to contact us at `scottsbots10158@gmail.com` or
in our Discord, linked here: (https://discord.gg/2GfC4qBP5s)

View File

@ -14,45 +14,56 @@ that the tuners require you to push the robot or the tuners output will say "inc
measurements will be in centimeters.
## Tuning
* To start with, we need the mass of the robot in kg. This is used for the centripetal force correction,
and the mass, with the variable name `mass`, should be put on line `86` in the `FollowerConstants`
* First, make sure that your motor names and directions, located at the top of `FollowerConstants`,
are correct.
* After that, we need the mass of the robot in kg. This is used for the centripetal force correction,
and the mass, with the variable name `mass`, should be put on line `92` in the `FollowerConstants`
class under the `tuning` package.
* Next, we need to find the preferred mecanum drive vectors. The rollers on mecanum wheels point at a
45 degree angle from the forward direction, but the actual direction the force is output is actually
closer to forward. To find the direction your wheels will go, you will need to run the
closer to forward. Before running any OpModes, make sure your motors are reversed properly in the
`Follower` class constructor. To find the direction your wheels will go, you will need to run the
`Forward Velocity Tuner` and `Strafe Velocity Tuner` OpModes. These will run your robot at full
power for 40 inches forward and to the right, respectively. The distance can be changed through FTC
Dashboard under the dropdown for each respective class, but higher distances work better. After the
distance has finished running, the end velocity will be output to telemetry. The robot may continue
to drift a little bit after the robot has finished running the distance, so make sure you have
plenty of room. Once you're done, put the velocity for the `Forward Velocity Tuner` on line `33` in
the `FollowerConstants` class, and the velocity for the `Strafe Velocity Tuner` on line `34` in the
plenty of room. Once you're done, put the velocity for the `Forward Velocity Tuner` on line `39` in
the `FollowerConstants` class, and the velocity for the `Strafe Velocity Tuner` on line `40` in the
`FollowerConstants` class. The variable names should be `xMovement` and `yMovement`, respectively.
* The last set of automatic tuners you'll need to run are the zero power acceleration tuners. These
find the rate at which your robot decelerates when power is cut from the drivetrain. This is used to
get a more accurate estimation of the drive vector. To find this, you will need to run the
`Forward Zero Power Acceleration Tuner` and the `Lateral Zero Power Acceleration Tuner` OpModes.
These will run your robot until it hits a velocity of 10 inches/second forward and to the right,
These will run your robot until it hits a velocity of 30 inches/second forward and to the right,
respectively. The velocity can be changed through FTC Dashboard under the dropdown for each
respective class, but higher velocities work better. After the velocity has been reached, power will
be cut from the drivetrain and the robot's deceleration will be tracked until the robot stops, at
which point it will display the deceleration in telemetry. This robot will need to drift to a stop
to properly work, and the higher the velocity the greater the drift distance, so make sure you have
enough room. Once you're done, put the zero power acceleration for the
`Forward Zero Power Acceleration Tuner` on line `94` in the `FollowerConstants` class and the zero
power acceleration for the `Lateral Zero Power Acceleration Tuner` on line `98` in the
`Forward Zero Power Acceleration Tuner` on line `100` in the `FollowerConstants` class and the zero
power acceleration for the `Lateral Zero Power Acceleration Tuner` on line `104` in the
`FollowerConstants` class. The variable names should be `forwardZeroPowerAcceleration` and
`lateralZeroPowerAcceleration`, respectively.
* After this, we will want to tune the translational PID. Go to FTC Dashboard and disable all but
the `useTranslational` checkboxes under the `Follower` tab. Then, run `StraightBackAndForth`. Make
sure you disable the timer on autonomous OpModes. The PID for the translational error is called
`translationalPIDF`. If you need to add a feedforward value, use the `translationalPIDFFeedForward`
since that will add the feedforward in the direction the robot is trying to move, rather than the
feedforward in the PIDF itself, since those will only add the feedforward one way. You can change
the PIDF constants and feedforward values, under the `FollowerConstants` tab in FTC Dashboard.
the `useTranslational` checkboxes under the `Follower` tab. Then, run `StraightBackAndForth`.
Make sure you disable the timer on autonomous OpModes. You will notice in telemetry a message saying
that the robot will travel a distance forward and backward, this will not happen until later, so for
now you can ignore this message. The robot should not move when you run the OpMode initially. Instead,
it should correct when you push it away from its starting position. Note that this correction should
happen regardless of the robot's rotation, and that the robot should not rotate itself (if it does,
disable `useHeading` as mentioned prior). Also note that the robot will only correct to an imaginary line
that runs straight forward from the robot's starting position, meaning that it will not correct in the
(original) forward direction. The PID for the translational error is called `translationalPIDF`.
If you need to add a feedforward value, use the `translationalPIDFFeedForward` since that will add
the feedforward in the direction the robot is trying to move, rather than the feedforward in the
PIDF itself, since those will only add the feedforward one way. You can change the PIDF constants
and feedforward values, under the `FollowerConstants` tab in FTC Dashboard.
To tune the PID, push the robot off the path and see how it corrects. You will want to alternate sides
you push to reduce field wear and tear as well as push with varying power and distance. I would
recommend tuning the PID so that it is capable of correcting while minimizing oscillations and still
@ -70,9 +81,8 @@ measurements will be in centimeters.
`zeroPowerAccelerationMultiplier`. This determines how fast your robot will decelerate as a factor
of how fast your robot will coast to a stop. Honestly, this is up to you. I personally used 4, but
what works best for you is most important. Higher numbers will cause a faster brake, but increase
oscillations at the end. Lower numbers will do the opposite. This can be found on line `107` in
`FollowerConstants`, named `zeroPowerAccelerationMultiplier`. The drive PID is much, much more
sensitive than the others. For reference,
oscillations at the end. Lower numbers will do the opposite. This can be found on line `113` in
`FollowerConstants`, named `zeroPowerAccelerationMultiplier`. The drive PID is much, much more sensitive than the others. For reference,
my P values were in the hundredths and thousandths place values, and my D values were in the hundred
thousandths and millionths place values. To tune this, enable `useDrive`, `useHeading`, and
`useTranslational` in the `Follower` dropdown in FTC Dashboard. Next, run `StraightBackAndForth`
@ -97,7 +107,7 @@ measurements will be in centimeters.
* Finally, we will want to tune the centripetal force correction. This is a pretty simple tune. Open
up FTC Dashboard and enable everything under the `Follower` tab. Then, run `CurvedBackAndForth`
and turn off its timer. If you notice the robot is correcting towards the inside of the curve
as/after running a path, then increase `centripetalScaling`, which can be found on line `89` of
as/after running a path, then increase `centripetalScaling`, which can be found on line `95` of
`FollowerConstants`. If the robot is correcting towards the outside of the curve, then decrease
`centripetalScaling`.
@ -110,11 +120,11 @@ measurements will be in centimeters.
## Note About the PIDs
In versions of Pedro Pathing before early August 2024, there were 2 PIDs used in the translational,
heading, and drive control. However, now there is only one main PID. The old system can still be used.
Scroll down to the bottom of `FollowerConstants` and set all the booleans from lines `157` to `159`
Scroll down to the bottom of `FollowerConstants` and set all the booleans from lines `163` to `165`
to true. They should be named `useSecondaryTranslationalPID`, `useSecondaryHeadingPID`, and `useSecondaryDrivePID`.
This will enable the two PID system that Pedro Pathing originally used. From there, scroll
down and all the values pertaining to the secondary PIDs will be there. The two PID system works with
a PID that handles larger errors (the main PID) and a second PID to handle smaller errors (the
secondary PID). The main PID should be tuned to move the error within the secondary PID's range
without providing too much momentum that could cause an overshoot. The secondary PID should be tuned
to correct within its range quickly and accurately while minimizing oscillations.
to correct within its range quickly and accurately while minimizing oscillations.

View File

@ -15,6 +15,7 @@ import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector;
public class DriveVectorScaler {
// This is ordered left front, left back, right front, right back. These are also normalized.
private Vector[] mecanumVectors;
private double maxPowerScaling = 1;
/**
* This creates a new DriveVectorScaler, which takes in various movement vectors and outputs
@ -52,9 +53,9 @@ public class DriveVectorScaler {
*/
public double[] getDrivePowers(Vector correctivePower, Vector headingPower, Vector pathingPower, double robotHeading) {
// clamps down the magnitudes of the input vectors
if (correctivePower.getMagnitude() > 1) correctivePower.setMagnitude(1);
if (headingPower.getMagnitude() > 1) headingPower.setMagnitude(1);
if (pathingPower.getMagnitude() > 1) pathingPower.setMagnitude(1);
if (correctivePower.getMagnitude() > maxPowerScaling) correctivePower.setMagnitude(maxPowerScaling);
if (headingPower.getMagnitude() > maxPowerScaling) headingPower.setMagnitude(maxPowerScaling);
if (pathingPower.getMagnitude() > maxPowerScaling) pathingPower.setMagnitude(maxPowerScaling);
// the powers for the wheel vectors
double [] wheelPowers = new double[4];
@ -65,8 +66,8 @@ public class DriveVectorScaler {
// this contains the pathing vectors, one for each side (heading control requires 2)
Vector[] truePathingVectors = new Vector[2];
if (correctivePower.getMagnitude() == 1) {
// checks for corrective power equal to 1 in magnitude. if equal to one, then set pathing power to that
if (correctivePower.getMagnitude() == maxPowerScaling) {
// checks for corrective power equal to max power scaling in magnitude. if equal, then set pathing power to that
truePathingVectors[0] = MathFunctions.copyVector(correctivePower);
truePathingVectors[1] = MathFunctions.copyVector(correctivePower);
} else {
@ -74,7 +75,7 @@ public class DriveVectorScaler {
Vector leftSideVector = MathFunctions.subtractVectors(correctivePower, headingPower);
Vector rightSideVector = MathFunctions.addVectors(correctivePower, headingPower);
if (leftSideVector.getMagnitude() > 1 || rightSideVector.getMagnitude() > 1) {
if (leftSideVector.getMagnitude() > maxPowerScaling || rightSideVector.getMagnitude() > maxPowerScaling) {
//if the combined corrective and heading power is greater than 1, then scale down heading power
double headingScalingFactor = Math.min(findNormalizingScaling(correctivePower, headingPower), findNormalizingScaling(correctivePower, MathFunctions.scalarMultiplyVector(headingPower, -1)));
truePathingVectors[0] = MathFunctions.subtractVectors(correctivePower, MathFunctions.scalarMultiplyVector(headingPower, headingScalingFactor));
@ -84,7 +85,7 @@ public class DriveVectorScaler {
Vector leftSideVectorWithPathing = MathFunctions.addVectors(leftSideVector, pathingPower);
Vector rightSideVectorWithPathing = MathFunctions.addVectors(rightSideVector, pathingPower);
if (leftSideVectorWithPathing.getMagnitude() > 1 || rightSideVectorWithPathing.getMagnitude() > 1) {
if (leftSideVectorWithPathing.getMagnitude() > maxPowerScaling || rightSideVectorWithPathing.getMagnitude() > maxPowerScaling) {
// too much power now, so we scale down the pathing vector
double pathingScalingFactor = Math.min(findNormalizingScaling(leftSideVector, pathingPower), findNormalizingScaling(rightSideVector, pathingPower));
truePathingVectors[0] = MathFunctions.addVectors(leftSideVector, MathFunctions.scalarMultiplyVector(pathingPower, pathingScalingFactor));
@ -113,7 +114,7 @@ public class DriveVectorScaler {
wheelPowers[3] = (mecanumVectorsCopy[2].getXComponent()*truePathingVectors[1].getYComponent() - truePathingVectors[1].getXComponent()*mecanumVectorsCopy[2].getYComponent()) / (mecanumVectorsCopy[2].getXComponent()*mecanumVectorsCopy[3].getYComponent() - mecanumVectorsCopy[3].getXComponent()*mecanumVectorsCopy[2].getYComponent());
double wheelPowerMax = Math.max(Math.max(Math.abs(wheelPowers[0]), Math.abs(wheelPowers[1])), Math.max(Math.abs(wheelPowers[2]), Math.abs(wheelPowers[3])));
if (wheelPowerMax > 1) {
if (wheelPowerMax > maxPowerScaling) {
wheelPowers[0] /= wheelPowerMax;
wheelPowers[1] /= wheelPowerMax;
wheelPowers[2] /= wheelPowerMax;
@ -126,12 +127,12 @@ public class DriveVectorScaler {
/**
* This takes in two Vectors, one static and one variable, and returns the scaling factor that,
* when multiplied to the variable Vector, results in magnitude of the sum of the static Vector
* and the scaled variable Vector being 1.
* and the scaled variable Vector being the max power scaling.
*
* IMPORTANT NOTE: I did not intend for this to be used for anything other than the method above
* this one in this class, so there will be errors if you input Vectors of length greater than 1,
* this one in this class, so there will be errors if you input Vectors of length greater than maxPowerScaling,
* and it will scale up the variable Vector if the magnitude of the sum of the two input Vectors
* isn't greater than 1. So, just don't use this elsewhere. There's gotta be a better way to do
* isn't greater than maxPowerScaling. So, just don't use this elsewhere. There's gotta be a better way to do
* whatever you're trying to do.
*
* I know that this is used outside of this class, however, I created this method so I get to
@ -140,13 +141,32 @@ public class DriveVectorScaler {
*
* @param staticVector the Vector that is held constant.
* @param variableVector the Vector getting scaled to make the sum of the input Vectors have a
* magnitude of 1.
* magnitude of maxPowerScaling.
* @return returns the scaling factor for the variable Vector.
*/
public double findNormalizingScaling(Vector staticVector, Vector variableVector) {
double a = Math.pow(variableVector.getXComponent(), 2) + Math.pow(variableVector.getYComponent(), 2);
double b = staticVector.getXComponent() * variableVector.getXComponent() + staticVector.getYComponent() * variableVector.getYComponent();
double c = Math.pow(staticVector.getXComponent(), 2) + Math.pow(staticVector.getYComponent(), 2) - 1.0;
double c = Math.pow(staticVector.getXComponent(), 2) + Math.pow(staticVector.getYComponent(), 2) - Math.pow(maxPowerScaling, 2);
return (-b + Math.sqrt(Math.pow(b, 2) - a*c))/(a);
}
/**
 * Updates the maximum power scaling applied when normalizing drive vectors.
 * Inputs outside [0, 1] are clamped into that range before being stored.
 *
 * @param maxPowerScaling the desired maximum power scaling
 */
public void setMaxPowerScaling(double maxPowerScaling) {
    double clamped = MathFunctions.clamp(maxPowerScaling, 0, 1);
    this.maxPowerScaling = clamped;
}
/**
 * Returns the maximum power scaling currently used by this drive vector scaler.
 * The stored value always lies within [0, 1].
 *
 * @return the current max power scaling
 */
public double getMaxPowerScaling() {
    return this.maxPowerScaling;
}
}

View File

@ -1,20 +1,23 @@
package org.firstinspires.ftc.teamcode.pedroPathing.follower;
import static org.firstinspires.ftc.teamcode.PedroConstants.*;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.drivePIDFFeedForward;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.drivePIDFSwitch;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.forwardZeroPowerAcceleration;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.headingPIDFSwitch;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.drivePIDFFeedForward;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.headingPIDFFeedForward;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.headingPIDFSwitch;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.lateralZeroPowerAcceleration;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftFrontMotorName;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftRearMotorName;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightFrontMotorName;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightRearMotorName;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.translationalPIDFFeedForward;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.lateralZeroPowerAcceleration;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftFrontMotorDirection;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftRearMotorDirection;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightFrontMotorDirection;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightRearMotorDirection;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.secondaryDrivePIDFFeedForward;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.secondaryHeadingPIDFFeedForward;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.secondaryTranslationalPIDFFeedForward;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.translationalPIDFFeedForward;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.translationalPIDFSwitch;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.useSecondaryDrivePID;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.useSecondaryHeadingPID;
@ -24,7 +27,6 @@ import com.acmerobotics.dashboard.config.Config;
import com.acmerobotics.dashboard.telemetry.MultipleTelemetry;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.hardware.DcMotorEx;
import com.qualcomm.robotcore.hardware.DcMotorSimple;
import com.qualcomm.robotcore.hardware.HardwareMap;
import com.qualcomm.robotcore.hardware.configuration.typecontainers.MotorConfigurationType;
@ -94,7 +96,6 @@ public class Follower {
private boolean holdPositionAtEnd;
private boolean teleopDrive;
private double maxPower = 1;
private double previousSecondaryTranslationalIntegral;
private double previousTranslationalIntegral;
private double holdPointTranslationalScaling = FollowerConstants.holdPointTranslationalScaling;
@ -153,6 +154,16 @@ public class Follower {
initialize();
}
/**
 * This creates a new Follower given a HardwareMap and a localizer.
 *
 * @param hardwareMap HardwareMap required
 * @param localizer the localizer you wish to use
 */
public Follower(HardwareMap hardwareMap, Localizer localizer) {
this.hardwareMap = hardwareMap;
// delegate to the localizer-aware initialization path
initialize(localizer);
}
/**
* This initializes the follower.
* In this, the DriveVectorScaler and PoseUpdater is instantiated, the drive motors are
@ -197,18 +208,7 @@ public class Follower {
* @param set This caps the motor power from [0, 1].
*/
public void setMaxPower(double set) {
maxPower = MathFunctions.clamp(set, 0, 1);
}
/**
* This handles the limiting of the drive powers array to the max power.
*/
public void limitDrivePowers() {
for (int i = 0; i < drivePowers.length; i++) {
if (Math.abs(drivePowers[i]) > maxPower) {
drivePowers[i] = maxPower * MathFunctions.getSign(drivePowers[i]);
}
}
driveVectorScaler.setMaxPowerScaling(set);
}
/**
@ -368,6 +368,25 @@ public class Follower {
closestPose = currentPath.getClosestPoint(poseUpdater.getPose(), 1);
}
/**
 * This holds a Point.
 *
 * @param point the Point to stay at.
 * @param heading the heading to face.
 */
public void holdPoint(Point point, double heading) {
// wrap the Point in a degenerate BezierPoint and delegate to the BezierPoint overload
holdPoint(new BezierPoint(point), heading);
}
/**
 * This holds a Point.
 *
 * @param pose the Point (as a Pose) to stay at; the Pose's heading is used as the heading to face.
 */
public void holdPoint(Pose pose) {
// split the Pose into its position (Point) and heading components
holdPoint(new Point(pose), pose.getHeading());
}
/**
* This follows a Path.
* This also makes the Follower hold the last Point on the Path.
@ -429,15 +448,22 @@ public class Follower {
}
/**
* This calls an update to the PoseUpdater, which updates the robot's current position estimate.
* This also updates all the Follower's PIDFs, which updates the motor powers.
* Calls an update to the PoseUpdater, which updates the robot's current position estimate.
*/
public void update() {
public void updatePose() {
poseUpdater.update();
if (drawOnDashboard) {
dashboardPoseTracker.update();
}
}
/**
* This calls an update to the PoseUpdater, which updates the robot's current position estimate.
* This also updates all the Follower's PIDFs, which updates the motor powers.
*/
public void update() {
updatePose();
if (!teleopDrive) {
if (currentPath != null) {
@ -446,8 +472,6 @@ public class Follower {
drivePowers = driveVectorScaler.getDrivePowers(MathFunctions.scalarMultiplyVector(getTranslationalCorrection(), holdPointTranslationalScaling), MathFunctions.scalarMultiplyVector(getHeadingVector(), holdPointHeadingScaling), new Vector(), poseUpdater.getPose().getHeading());
limitDrivePowers();
for (int i = 0; i < motors.size(); i++) {
motors.get(i).setPower(drivePowers[i]);
}
@ -459,8 +483,6 @@ public class Follower {
drivePowers = driveVectorScaler.getDrivePowers(getCorrectiveVector(), getHeadingVector(), getDriveVector(), poseUpdater.getPose().getHeading());
limitDrivePowers();
for (int i = 0; i < motors.size(); i++) {
motors.get(i).setPower(drivePowers[i]);
}
@ -503,8 +525,6 @@ public class Follower {
drivePowers = driveVectorScaler.getDrivePowers(getCentripetalForceCorrection(), teleopHeadingVector, teleopDriveVector, poseUpdater.getPose().getHeading());
limitDrivePowers();
for (int i = 0; i < motors.size(); i++) {
motors.get(i).setPower(drivePowers[i]);
}
@ -518,7 +538,7 @@ public class Follower {
* movement, this is the x-axis.
* @param lateralDrive determines the lateral drive vector for the robot in teleop. In field centric
* movement, this is the y-axis.
* @param heading determines the heading vector for the robot in teleop.
* @param heading determines the heading vector for the robot in teleop.
*/
public void setTeleOpMovementVectors(double forwardDrive, double lateralDrive, double heading) {
setTeleOpMovementVectors(forwardDrive, lateralDrive, heading, true);
@ -531,7 +551,7 @@ public class Follower {
* movement, this is the x-axis.
* @param lateralDrive determines the lateral drive vector for the robot in teleop. In field centric
* movement, this is the y-axis.
* @param heading determines the heading vector for the robot in teleop.
* @param heading determines the heading vector for the robot in teleop.
* @param robotCentric sets if the movement will be field or robot centric
*/
public void setTeleOpMovementVectors(double forwardDrive, double lateralDrive, double heading, boolean robotCentric) {
@ -670,19 +690,19 @@ public class Follower {
public Vector getDriveVector() {
if (!useDrive) return new Vector();
if (followingPathChain && chainIndex < currentPathChain.size() - 1) {
return new Vector(1, currentPath.getClosestPointTangentVector().getTheta());
return new Vector(driveVectorScaler.getMaxPowerScaling(), currentPath.getClosestPointTangentVector().getTheta());
}
driveError = getDriveVelocityError();
if (Math.abs(driveError) < drivePIDFSwitch && useSecondaryDrivePID) {
secondaryDrivePIDF.updateError(driveError);
driveVector = new Vector(MathFunctions.clamp(secondaryDrivePIDF.runPIDF() + secondaryDrivePIDFFeedForward * MathFunctions.getSign(driveError), -1, 1), currentPath.getClosestPointTangentVector().getTheta());
driveVector = new Vector(MathFunctions.clamp(secondaryDrivePIDF.runPIDF() + secondaryDrivePIDFFeedForward * MathFunctions.getSign(driveError), -driveVectorScaler.getMaxPowerScaling(), driveVectorScaler.getMaxPowerScaling()), currentPath.getClosestPointTangentVector().getTheta());
return MathFunctions.copyVector(driveVector);
}
drivePIDF.updateError(driveError);
driveVector = new Vector(MathFunctions.clamp(drivePIDF.runPIDF() + drivePIDFFeedForward * MathFunctions.getSign(driveError), -1, 1), currentPath.getClosestPointTangentVector().getTheta());
driveVector = new Vector(MathFunctions.clamp(drivePIDF.runPIDF() + drivePIDFFeedForward * MathFunctions.getSign(driveError), -driveVectorScaler.getMaxPowerScaling(), driveVectorScaler.getMaxPowerScaling()), currentPath.getClosestPointTangentVector().getTheta());
return MathFunctions.copyVector(driveVector);
}
@ -722,7 +742,7 @@ public class Follower {
Vector velocityErrorVector = MathFunctions.addVectors(forwardVelocityError, lateralVelocityError);
previousRawDriveError = rawDriveError;
rawDriveError = velocityErrorVector.getMagnitude() * MathFunctions.getSign(MathFunctions.dotProduct(velocityErrorVector, currentPath.getClosestPointTangentVector()));
rawDriveError = velocityErrorVector.getMagnitude() * MathFunctions.getSign(MathFunctions.dotProduct(velocityErrorVector, currentPath.getClosestPointTangentVector()));
double projection = 2 * driveErrors[1] - driveErrors[0];
@ -751,11 +771,11 @@ public class Follower {
headingError = MathFunctions.getTurnDirection(poseUpdater.getPose().getHeading(), currentPath.getClosestPointHeadingGoal()) * MathFunctions.getSmallestAngleDifference(poseUpdater.getPose().getHeading(), currentPath.getClosestPointHeadingGoal());
if (Math.abs(headingError) < headingPIDFSwitch && useSecondaryHeadingPID) {
secondaryHeadingPIDF.updateError(headingError);
headingVector = new Vector(MathFunctions.clamp(secondaryHeadingPIDF.runPIDF() + secondaryHeadingPIDFFeedForward * MathFunctions.getTurnDirection(poseUpdater.getPose().getHeading(), currentPath.getClosestPointHeadingGoal()), -1, 1), poseUpdater.getPose().getHeading());
headingVector = new Vector(MathFunctions.clamp(secondaryHeadingPIDF.runPIDF() + secondaryHeadingPIDFFeedForward * MathFunctions.getTurnDirection(poseUpdater.getPose().getHeading(), currentPath.getClosestPointHeadingGoal()), -driveVectorScaler.getMaxPowerScaling(), driveVectorScaler.getMaxPowerScaling()), poseUpdater.getPose().getHeading());
return MathFunctions.copyVector(headingVector);
}
headingPIDF.updateError(headingError);
headingVector = new Vector(MathFunctions.clamp(headingPIDF.runPIDF() + headingPIDFFeedForward * MathFunctions.getTurnDirection(poseUpdater.getPose().getHeading(), currentPath.getClosestPointHeadingGoal()), -1, 1), poseUpdater.getPose().getHeading());
headingVector = new Vector(MathFunctions.clamp(headingPIDF.runPIDF() + headingPIDFFeedForward * MathFunctions.getTurnDirection(poseUpdater.getPose().getHeading(), currentPath.getClosestPointHeadingGoal()), -driveVectorScaler.getMaxPowerScaling(), driveVectorScaler.getMaxPowerScaling()), poseUpdater.getPose().getHeading());
return MathFunctions.copyVector(headingVector);
}
@ -772,7 +792,7 @@ public class Follower {
Vector translational = getTranslationalCorrection();
Vector corrective = MathFunctions.addVectors(centripetal, translational);
if (corrective.getMagnitude() > 1) {
if (corrective.getMagnitude() > driveVectorScaler.getMaxPowerScaling()) {
return MathFunctions.addVectors(centripetal, MathFunctions.scalarMultiplyVector(translational, driveVectorScaler.findNormalizingScaling(centripetal, translational)));
}
@ -821,7 +841,7 @@ public class Follower {
translationalVector = MathFunctions.addVectors(translationalVector, translationalIntegralVector);
}
translationalVector.setMagnitude(MathFunctions.clamp(translationalVector.getMagnitude(), 0, 1));
translationalVector.setMagnitude(MathFunctions.clamp(translationalVector.getMagnitude(), 0, driveVectorScaler.getMaxPowerScaling()));
this.translationalVector = MathFunctions.copyVector(translationalVector);
@ -860,7 +880,7 @@ public class Follower {
curvature = (yDoublePrime) / (Math.pow(Math.sqrt(1 + Math.pow(yPrime, 2)), 3));
}
if (Double.isNaN(curvature)) return new Vector();
centripetalVector = new Vector(MathFunctions.clamp(FollowerConstants.centripetalScaling * FollowerConstants.mass * Math.pow(MathFunctions.dotProduct(poseUpdater.getVelocity(), MathFunctions.normalizeVector(currentPath.getClosestPointTangentVector())), 2) * curvature, -1, 1), currentPath.getClosestPointTangentVector().getTheta() + Math.PI / 2 * MathFunctions.getSign(currentPath.getClosestPointNormalVector().getTheta()));
centripetalVector = new Vector(MathFunctions.clamp(FollowerConstants.centripetalScaling * FollowerConstants.mass * Math.pow(MathFunctions.dotProduct(poseUpdater.getVelocity(), MathFunctions.normalizeVector(currentPath.getClosestPointTangentVector())), 2) * curvature, -driveVectorScaler.getMaxPowerScaling(), driveVectorScaler.getMaxPowerScaling()), currentPath.getClosestPointTangentVector().getTheta() + Math.PI / 2 * MathFunctions.getSign(currentPath.getClosestPointNormalVector().getTheta()));
return centripetalVector;
}
@ -996,7 +1016,7 @@ public class Follower {
/**
* This resets the IMU, if applicable.
*/
public void resetIMU() {
private void resetIMU() throws InterruptedException {
poseUpdater.resetIMU();
}
}

View File

@ -0,0 +1,520 @@
/* MIT License
* Copyright (c) [2024] [Base 10 Assets, LLC]
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package org.firstinspires.ftc.teamcode.pedroPathing.localization;
import static com.qualcomm.robotcore.util.TypeConversion.byteArrayToInt;
import com.qualcomm.hardware.lynx.LynxI2cDeviceSynch;
import com.qualcomm.robotcore.hardware.I2cAddr;
import com.qualcomm.robotcore.hardware.I2cDeviceSynchDevice;
import com.qualcomm.robotcore.hardware.I2cDeviceSynchSimple;
import com.qualcomm.robotcore.hardware.configuration.annotations.DeviceProperties;
import com.qualcomm.robotcore.hardware.configuration.annotations.I2cDeviceType;
import com.qualcomm.robotcore.util.TypeConversion;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.robotcore.external.navigation.DistanceUnit;
import org.firstinspires.ftc.robotcore.external.navigation.Pose2D;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.Arrays;
@I2cDeviceType
@DeviceProperties(
name = "goBILDA® Pinpoint Odometry Computer",
xmlTag = "goBILDAPinpoint",
description ="goBILDA® Pinpoint Odometry Computer (IMU Sensor Fusion for 2 Wheel Odometry)"
)
public class GoBildaPinpointDriver extends I2cDeviceSynchDevice<I2cDeviceSynchSimple> {
// Cached values from the most recent bulk read; refreshed by update().
private int deviceStatus = 0; // raw status bitfield from the device (decoded via lookupStatus)
private int loopTime = 0; // device-reported internal loop time
private int xEncoderValue = 0; // raw X (forward) pod encoder ticks
private int yEncoderValue = 0; // raw Y (strafe) pod encoder ticks
private float xPosition = 0; // estimated X position (units per device configuration)
private float yPosition = 0; // estimated Y position
private float hOrientation = 0; // estimated heading
private float xVelocity = 0; // estimated X velocity
private float yVelocity = 0; // estimated Y velocity
private float hVelocity = 0; // estimated heading velocity
private static final float goBILDA_SWINGARM_POD = 13.26291192f; //ticks-per-mm for the goBILDA Swingarm Pod
private static final float goBILDA_4_BAR_POD = 19.89436789f; //ticks-per-mm for the goBILDA 4-Bar Pod
//i2c address of the device
public static final byte DEFAULT_ADDRESS = 0x31;
/**
 * Creates a driver bound to the given synchronous I2C client at the device's default 7-bit address.
 *
 * @param deviceClient the I2C client used to talk to the Pinpoint
 * @param deviceClientIsOwned whether this driver owns (and should manage) the client
 */
public GoBildaPinpointDriver(I2cDeviceSynchSimple deviceClient, boolean deviceClientIsOwned) {
super(deviceClient, deviceClientIsOwned);
this.deviceClient.setI2cAddress(I2cAddr.create7bit(DEFAULT_ADDRESS));
super.registerArmingStateCallback(false);
}
@Override
public Manufacturer getManufacturer() {
// goBILDA is not an enumerated manufacturer in the SDK, so report Other
return Manufacturer.Other;
}
@Override
protected synchronized boolean doInitialize() {
// Bump the I2C bus to 400kHz for faster bulk reads from the Pinpoint
((LynxI2cDeviceSynch)(deviceClient)).setBusSpeed(LynxI2cDeviceSynch.BusSpeed.FAST_400K);
return true;
}
@Override
public String getDeviceName() {
// Human-readable device name shown in the SDK's hardware UI
return "goBILDA® Pinpoint Odometry Computer";
}
//Register map of the i2c device; each constant's bVal is the register index on the wire
private enum Register {
DEVICE_ID (1),
DEVICE_VERSION (2),
DEVICE_STATUS (3),
DEVICE_CONTROL (4),
LOOP_TIME (5),
X_ENCODER_VALUE (6),
Y_ENCODER_VALUE (7),
X_POSITION (8),
Y_POSITION (9),
H_ORIENTATION (10),
X_VELOCITY (11),
Y_VELOCITY (12),
H_VELOCITY (13),
MM_PER_TICK (14),
X_POD_OFFSET (15),
Y_POD_OFFSET (16),
YAW_SCALAR (17),
BULK_READ (18);
// register index used when reading/writing over i2c
private final int bVal;
Register(int bVal){
this.bVal = bVal;
}
}
//Device Status enum that captures the current fault condition of the device
public enum DeviceStatus{
NOT_READY (0),
READY (1),
CALIBRATING (1 << 1),
FAULT_X_POD_NOT_DETECTED (1 << 2),
FAULT_Y_POD_NOT_DETECTED (1 << 3),
FAULT_NO_PODS_DETECTED (1 << 2 | 1 << 3),
FAULT_IMU_RUNAWAY (1 << 4);
// bit flag(s) for this status within the device's raw status bitfield
private final int status;
DeviceStatus(int status){
this.status = status;
}
}
//enum that captures the direction the encoders are set to
public enum EncoderDirection{
FORWARD,
REVERSED;
}
//enum that captures the kind of goBILDA odometry pods, if goBILDA pods are used
public enum GoBildaOdometryPods {
goBILDA_SWINGARM_POD,
goBILDA_4_BAR_POD;
}
//enum that captures a limited scope of read data. More options may be added in future update
public enum readData {
ONLY_UPDATE_HEADING,
}
/**
 * Writes an int to a register of the i2c device, little-endian.
 *
 * @param reg the register to write the int to
 * @param i the integer to write to the register
 */
private void writeInt(final Register reg, int i) {
    byte[] payload = TypeConversion.intToByteArray(i, ByteOrder.LITTLE_ENDIAN);
    deviceClient.write(reg.bVal, payload);
}
/**
 * Reads an int from a register of the i2c device, little-endian.
 *
 * @param reg the register to read from
 * @return the value stored in the read register
 */
private int readInt(Register reg) {
    byte[] raw = deviceClient.read(reg.bVal, 4);
    return byteArrayToInt(raw, ByteOrder.LITTLE_ENDIAN);
}
/**
 * Converts a byte array to a float value.
 *
 * @param byteArray byte array to transform
 * @param byteOrder order of the byte array to convert
 * @return the float value stored by the byte array
 */
private float byteArrayToFloat(byte[] byteArray, ByteOrder byteOrder) {
    ByteBuffer buffer = ByteBuffer.wrap(byteArray);
    return buffer.order(byteOrder).getFloat();
}
/**
 * Reads a float from a register, little-endian.
 *
 * @param reg the register to read
 * @return the float value stored in that register
 */
private float readFloat(Register reg) {
    byte[] raw = deviceClient.read(reg.bVal, 4);
    return byteArrayToFloat(raw, ByteOrder.LITTLE_ENDIAN);
}
/**
 * Converts a float value to a 4-byte array.
 *
 * @param value the float value to convert
 * @param byteOrder the byte order to use for the conversion
 * @return the byte array converted from the float
 */
private byte [] floatToByteArray (float value, ByteOrder byteOrder) {
return ByteBuffer.allocate(4).order(byteOrder).putFloat(value).array();
}
/**
 * Writes a byte array to a register on the i2c device.
 *
 * @param reg the register to write to
 * @param bytes the byte array to write
 */
private void writeByteArray (Register reg, byte[] bytes){
deviceClient.write(reg.bVal,bytes);
}
/**
 * Writes a float to a register on the i2c device, little-endian.
 *
 * @param reg the register to write to
 * @param f the float to write
 */
private void writeFloat(Register reg, float f) {
    // Reuse the shared float->byte conversion so every float write goes through one code path,
    // instead of duplicating the ByteBuffer serialization inline.
    deviceClient.write(reg.bVal, floatToByteArray(f, ByteOrder.LITTLE_ENDIAN));
}
/**
 * Looks up the DeviceStatus enum corresponding with an int value.
 *
 * @param s the raw status bitfield to look up
 * @return the Odometry Computer state
 */
private DeviceStatus lookupStatus(int s) {
    // Calibration takes precedence over every other condition.
    if ((s & DeviceStatus.CALIBRATING.status) != 0) {
        return DeviceStatus.CALIBRATING;
    }
    boolean xPodMissing = (s & DeviceStatus.FAULT_X_POD_NOT_DETECTED.status) != 0;
    boolean yPodMissing = (s & DeviceStatus.FAULT_Y_POD_NOT_DETECTED.status) != 0;
    // Pod faults: both missing beats a single missing pod.
    if (xPodMissing && yPodMissing) {
        return DeviceStatus.FAULT_NO_PODS_DETECTED;
    } else if (xPodMissing) {
        return DeviceStatus.FAULT_X_POD_NOT_DETECTED;
    } else if (yPodMissing) {
        return DeviceStatus.FAULT_Y_POD_NOT_DETECTED;
    }
    if ((s & DeviceStatus.FAULT_IMU_RUNAWAY.status) != 0) {
        return DeviceStatus.FAULT_IMU_RUNAWAY;
    }
    // No faults: either ready or still coming up.
    return ((s & DeviceStatus.READY.status) != 0) ? DeviceStatus.READY : DeviceStatus.NOT_READY;
}
/**
 * Call this once per loop to read new data from the Odometry Computer. Data will only update once this is called.
 */
public void update() {
    // A single 40-byte bulk read covers status, loop time, both encoders,
    // position (x, y, heading), and velocity (x, y, heading) in order.
    byte[] bArr = deviceClient.read(Register.BULK_READ.bVal, 40);
    ByteBuffer buffer = ByteBuffer.wrap(bArr).order(ByteOrder.LITTLE_ENDIAN);
    deviceStatus = buffer.getInt();
    loopTime = buffer.getInt();
    xEncoderValue = buffer.getInt();
    yEncoderValue = buffer.getInt();
    xPosition = buffer.getFloat();
    yPosition = buffer.getFloat();
    hOrientation = buffer.getFloat();
    xVelocity = buffer.getFloat();
    yVelocity = buffer.getFloat();
    hVelocity = buffer.getFloat();
}
/**
 * Call this once per loop to read new data from the Odometry Computer. This is an override of the update() function
 * which allows a narrower range of data to be read from the device for faster read times. Currently ONLY_UPDATE_HEADING
 * is supported.
 * @param data GoBildaPinpointDriver.readData.ONLY_UPDATE_HEADING
 */
public void update(readData data) {
    if (data != readData.ONLY_UPDATE_HEADING) {
        return; // no other partial-read modes are supported yet
    }
    byte[] raw = deviceClient.read(Register.H_ORIENTATION.bVal, 4);
    hOrientation = byteArrayToFloat(raw, ByteOrder.LITTLE_ENDIAN);
}
/**
 * Sets the odometry pod positions relative to the point that the odometry computer tracks around.<br><br>
 * The most common tracking position is the center of the robot. <br><br>
 * The X pod offset is how far sideways (in mm) from the tracking point the X (forward) pod sits; left of center is positive, right of center is negative. <br>
 * The Y pod offset is how far forward (in mm) from the tracking point the Y (strafe) pod sits; forward of center is positive, backwards is negative.<br>
 * @param xOffset how sideways from the center of the robot is the X (forward) pod? Left increases
 * @param yOffset how far forward from the center of the robot is the Y (Strafe) pod? forward increases
 */
public void setOffsets(double xOffset, double yOffset) {
    writeFloat(Register.X_POD_OFFSET, (float) xOffset);
    writeFloat(Register.Y_POD_OFFSET, (float) yOffset);
}
/**
 * Recalibrates the Odometry Computer's internal IMU. <br><br>
 * <strong> Robot MUST be stationary </strong> <br><br>
 * Device takes a large number of samples, and uses those as the gyroscope zero-offset. This takes approximately 0.25 seconds.
 */
public void recalibrateIMU() {
    // Bit 0 of DEVICE_CONTROL triggers an IMU recalibration.
    writeInt(Register.DEVICE_CONTROL, 1 << 0);
}
/**
 * Resets the current position to 0,0,0 and recalibrates the Odometry Computer's internal IMU. <br><br>
 * <strong> Robot MUST be stationary </strong> <br><br>
 * Device takes a large number of samples, and uses those as the gyroscope zero-offset. This takes approximately 0.25 seconds.
 */
public void resetPosAndIMU() {
    // Bit 1 of DEVICE_CONTROL zeroes the pose and recalibrates the IMU.
    writeInt(Register.DEVICE_CONTROL, 1 << 1);
}
/**
 * Can reverse the direction of each encoder.
 * @param xEncoder FORWARD or REVERSED, X (forward) pod should increase when the robot is moving forward
 * @param yEncoder FORWARD or REVERSED, Y (strafe) pod should increase when the robot is moving left
 */
public void setEncoderDirections(EncoderDirection xEncoder, EncoderDirection yEncoder) {
    // Each direction maps to its own control bit; the two states are mutually exclusive,
    // so an if/else-if chain performs the same writes as the original separate checks.
    if (xEncoder == EncoderDirection.FORWARD) {
        writeInt(Register.DEVICE_CONTROL, 1 << 5);
    } else if (xEncoder == EncoderDirection.REVERSED) {
        writeInt(Register.DEVICE_CONTROL, 1 << 4);
    }
    if (yEncoder == EncoderDirection.FORWARD) {
        writeInt(Register.DEVICE_CONTROL, 1 << 3);
    } else if (yEncoder == EncoderDirection.REVERSED) {
        writeInt(Register.DEVICE_CONTROL, 1 << 2);
    }
}
/**
 * If you're using goBILDA odometry pods, the ticks-per-mm values are stored here for easy access.<br><br>
 * @param pods goBILDA_SWINGARM_POD or goBILDA_4_BAR_POD
 */
public void setEncoderResolution(GoBildaOdometryPods pods) {
    if (pods == GoBildaOdometryPods.goBILDA_SWINGARM_POD) {
        writeByteArray(Register.MM_PER_TICK, floatToByteArray(goBILDA_SWINGARM_POD, ByteOrder.LITTLE_ENDIAN));
    } else if (pods == GoBildaOdometryPods.goBILDA_4_BAR_POD) {
        writeByteArray(Register.MM_PER_TICK, floatToByteArray(goBILDA_4_BAR_POD, ByteOrder.LITTLE_ENDIAN));
    }
}
/**
 * Sets the encoder resolution in ticks per mm of the odometry pods. <br>
 * You can find this number by dividing the counts-per-revolution of your encoder by the circumference of the wheel.
 * @param ticks_per_mm should be somewhere between 10 ticks/mm and 100 ticks/mm a goBILDA Swingarm pod is ~13.26291192
 */
public void setEncoderResolution(double ticks_per_mm){
    // Use the writeFloat helper (same little-endian float encoding) for consistency with setOffsets().
    writeFloat(Register.MM_PER_TICK, (float) ticks_per_mm);
}
/**
 * Tuning this value should be unnecessary.<br>
 * The goBILDA Odometry Computer has a per-device tuned yaw offset already applied when you receive it.<br><br>
 * This is a scalar that is applied to the gyro's yaw value. Increasing it will mean it will report more than one degree for every degree the sensor fusion algorithm measures. <br><br>
 * You can tune this variable by rotating the robot a large amount (10 full turns is a good starting place) and comparing the amount that the robot rotated to the amount measured.
 * Rotating the robot exactly 10 times should measure 3600°. If it measures more or less, divide moved amount by the measured amount and apply that value to the Yaw Offset.<br><br>
 * If you find that to get an accurate heading number you need to apply a scalar of more than 1.05, or less than 0.95, your device may be bad. Please reach out to tech@gobilda.com
 * @param yawOffset A scalar for the robot's heading.
 */
public void setYawScalar(double yawOffset){
    // Use the writeFloat helper (same little-endian float encoding) for consistency with setOffsets().
    writeFloat(Register.YAW_SCALAR, (float) yawOffset);
}
/**
 * Send a position that the Pinpoint should use to track your robot relative to. You can use this to
 * update the estimated position of your robot with new external sensor data, or to run a robot
 * in field coordinates. <br><br>
 * This overrides the current position. <br><br>
 * <strong>Using this feature to track your robot's position in field coordinates:</strong> <br>
 * When you start your code, send a Pose2D that describes the starting position on the field of your robot. <br>
 * Say you're on the red alliance, your robot is against the wall and closer to the audience side,
 * and the front of your robot is pointing towards the center of the field.
 * You can send a setPosition with something like -600mm x, -1200mm Y, and 90 degrees. The pinpoint would then always
 * keep track of how far away from the center of the field you are. <br><br>
 * <strong>Using this feature to update your position with additional sensors: </strong><br>
 * Some robots have a secondary way to locate their robot on the field. This is commonly
 * Apriltag localization in FTC, but it can also be something like a distance sensor.
 * Often these external sensors are absolute (meaning they measure something about the field)
 * so their data is very accurate. But they can be slower to read, or you may need to be in a very specific
 * position on the field to use them. In that case, spend most of your time relying on the Pinpoint
 * to determine your location. Then when you pull a new position from your secondary sensor,
 * send a setPosition command with the new position. The Pinpoint will then track your movement
 * relative to that new, more accurate position.
 * @param pos a Pose2D describing the robot's new position.
 * @return the same Pose2D that was passed in, for convenient chaining
 */
public Pose2D setPosition(Pose2D pos){
    // Delegate to writeFloat so every float register is encoded the same way (little-endian).
    writeFloat(Register.X_POSITION, (float) pos.getX(DistanceUnit.MM));
    writeFloat(Register.Y_POSITION, (float) pos.getY(DistanceUnit.MM));
    writeFloat(Register.H_ORIENTATION, (float) pos.getHeading(AngleUnit.RADIANS));
    return pos;
}
/**
 * Checks the deviceID of the Odometry Computer. Should return 1.
 * @return 1 if device is functional.
 */
public int getDeviceID() {
    return readInt(Register.DEVICE_ID);
}
/**
 * @return the firmware version of the Odometry Computer
 */
public int getDeviceVersion() {
    return readInt(Register.DEVICE_VERSION);
}
/**
 * Reads the yaw scalar currently applied by the device.
 * <strong> This performs its own I2C read (via readFloat); avoid calling it every loop. </strong>
 * @return the scalar applied to the gyro's yaw value
 */
public float getYawScalar(){return readFloat(Register.YAW_SCALAR); }
/**
 * Device Status stores any faults the Odometry Computer may be experiencing. These faults include:
 * @return one of the following states:<br>
 * NOT_READY - The device is currently powering up. And has not initialized yet. RED LED<br>
 * READY - The device is currently functioning as normal. GREEN LED<br>
 * CALIBRATING - The device is currently recalibrating the gyro. RED LED<br>
 * FAULT_NO_PODS_DETECTED - the device does not detect any pods plugged in. PURPLE LED <br>
 * FAULT_X_POD_NOT_DETECTED - The device does not detect an X pod plugged in. BLUE LED <br>
 * FAULT_Y_POD_NOT_DETECTED - The device does not detect a Y pod plugged in. ORANGE LED <br>
 */
public DeviceStatus getDeviceStatus() {
    // Decodes the status word cached by the most recent update() call.
    return lookupStatus(deviceStatus);
}
/**
 * Checks the Odometry Computer's most recent loop time.<br><br>
 * If values less than 500, or more than 1100 are commonly seen here, there may be something wrong with your device. Please reach out to tech@gobilda.com
 * @return loop time in microseconds (1/1,000,000 seconds)
 */
public int getLoopTime() {
    return loopTime;
}
/**
 * Checks the Odometry Computer's most recent loop frequency.<br><br>
 * If values less than 900, or more than 2000 are commonly seen here, there may be something wrong with your device. Please reach out to tech@gobilda.com
 * @return Pinpoint Frequency in Hz (loops per second),
 */
public double getFrequency() {
    // Guard against a divide-by-zero before any data has been read.
    return loopTime != 0 ? 1000000.0 / loopTime : 0;
}
/**
 * @return the raw X (forward) encoder reading, in ticks
 */
public int getEncoderX() {
    return xEncoderValue;
}
/**
 * @return the raw Y (strafe) encoder reading, in ticks
 */
public int getEncoderY() {
    return yEncoderValue;
}
/**
 * @return the estimated X (forward) position of the robot, in mm
 */
public double getPosX() {
    return xPosition;
}
/**
 * @return the estimated Y (strafe) position of the robot, in mm
 */
public double getPosY() {
    return yPosition;
}
/**
 * @return the estimated heading of the robot, in radians
 */
public double getHeading() {
    return hOrientation;
}
/**
 * @return the estimated X (forward) velocity of the robot, in mm/sec
 */
public double getVelX() {
    return xVelocity;
}
/**
 * @return the estimated Y (strafe) velocity of the robot, in mm/sec
 */
public double getVelY() {
    return yVelocity;
}
/**
 * @return the estimated heading velocity of the robot, in radians/sec
 */
public double getHeadingVelocity() {
    return hVelocity;
}
/**
 * <strong> This uses its own I2C read, avoid calling this every loop. </strong>
 * @return the user-set offset for the X (forward) pod
 */
public float getXOffset() {
    return readFloat(Register.X_POD_OFFSET);
}
/**
 * <strong> This uses its own I2C read, avoid calling this every loop. </strong>
 * @return the user-set offset for the Y (strafe) pod
 */
public float getYOffset() {
    return readFloat(Register.Y_POD_OFFSET);
}
/**
 * @return a Pose2D containing the estimated position of the robot (mm, radians)
 */
public Pose2D getPosition() {
    return new Pose2D(DistanceUnit.MM, xPosition, yPosition, AngleUnit.RADIANS, hOrientation);
}
/**
 * @return a Pose2D containing the estimated velocity of the robot, in units per second (mm/s, rad/s)
 */
public Pose2D getVelocity() {
    return new Pose2D(DistanceUnit.MM, xVelocity, yVelocity, AngleUnit.RADIANS, hVelocity);
}
}

View File

@@ -1,5 +1,7 @@
package org.firstinspires.ftc.teamcode.pedroPathing.localization;
import com.qualcomm.robotcore.hardware.IMU;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector;
/**
@@ -90,5 +92,14 @@ public abstract class Localizer {
/**
* This resets the IMU of the localizer, if applicable.
*/
public abstract void resetIMU();
public abstract void resetIMU() throws InterruptedException;
/**
* This is overridden to return the IMU, if there is one.
*
* @return returns the IMU if it exists
*/
public IMU getIMU() {
return null;
}
}

View File

@@ -1,5 +1,7 @@
package org.firstinspires.ftc.teamcode.pedroPathing.localization;
import androidx.annotation.NonNull;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.MathFunctions;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector;
@@ -44,7 +46,7 @@ public class Pose {
* This creates a new Pose with no inputs and 0 for all values.
*/
public Pose() {
this(0,0,0);
this(0, 0, 0);
}
/**
@@ -208,4 +210,10 @@ public class Pose {
public Pose copy() {
return new Pose(getX(), getY(), getHeading());
}
@NonNull
@Override
public String toString() {
return "(" + getX() + ", " + getY() + ", " + Math.toDegrees(getHeading()) + ")";
}
}

View File

@@ -5,7 +5,9 @@ import com.qualcomm.robotcore.hardware.HardwareMap;
import com.qualcomm.robotcore.hardware.IMU;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.localizers.ThreeWheelIMULocalizer;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.localizers.ThreeWheelLocalizer;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.localizers.TwoWheelLocalizer;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.MathFunctions;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector;
@@ -58,6 +60,7 @@ public class PoseUpdater {
}
this.localizer = localizer;
imu = localizer.getIMU();
}
/**
@@ -259,9 +262,10 @@
*/
public Vector getVelocity() {
if (currentVelocity == null) {
currentVelocity = new Vector();
currentVelocity.setOrthogonalComponents(getPose().getX() - previousPose.getX(), getPose().getY() - previousPose.getY());
currentVelocity.setMagnitude(MathFunctions.distance(getPose(), previousPose) / ((currentPoseTime - previousPoseTime) / Math.pow(10.0, 9)));
// currentVelocity = new Vector();
// currentVelocity.setOrthogonalComponents(getPose().getX() - previousPose.getX(), getPose().getY() - previousPose.getY());
// currentVelocity.setMagnitude(MathFunctions.distance(getPose(), previousPose) / ((currentPoseTime - previousPoseTime) / Math.pow(10.0, 9)));
currentVelocity = localizer.getVelocityVector();
return MathFunctions.copyVector(currentVelocity);
} else {
return MathFunctions.copyVector(currentVelocity);
@@ -298,7 +302,9 @@
* This resets the heading of the robot to the IMU's heading, using Road Runner's pose reset.
*/
public void resetHeadingToIMU() {
localizer.setPose(new Pose(getPose().getX(), getPose().getY(), getNormalizedIMUHeading() + startingPose.getHeading()));
if (imu != null) {
localizer.setPose(new Pose(getPose().getX(), getPose().getY(), getNormalizedIMUHeading() + startingPose.getHeading()));
}
}
/**
@@ -307,7 +313,9 @@
* method.
*/
public void resetHeadingToIMUWithOffsets() {
setCurrentPoseWithOffset(new Pose(getPose().getX(), getPose().getY(), getNormalizedIMUHeading() + startingPose.getHeading()));
if (imu != null) {
setCurrentPoseWithOffset(new Pose(getPose().getX(), getPose().getY(), getNormalizedIMUHeading() + startingPose.getHeading()));
}
}
/**
@@ -316,7 +324,10 @@
* @return returns the normalized IMU heading.
*/
public double getNormalizedIMUHeading() {
return MathFunctions.normalizeAngle(-imu.getRobotYawPitchRollAngles().getYaw(AngleUnit.RADIANS));
if (imu != null) {
return MathFunctions.normalizeAngle(-imu.getRobotYawPitchRollAngles().getYaw(AngleUnit.RADIANS));
}
return 0;
}
/**
@@ -340,7 +351,7 @@
/**
*
*/
public void resetIMU() {
public void resetIMU() throws InterruptedException {
localizer.resetIMU();
}
}

View File

@@ -0,0 +1,272 @@
package org.firstinspires.ftc.teamcode.pedroPathing.localization.localizers;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftFrontMotorName;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftRearMotorName;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightFrontMotorName;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightRearMotorName;
import com.acmerobotics.dashboard.config.Config;
import com.qualcomm.robotcore.hardware.DcMotorEx;
import com.qualcomm.robotcore.hardware.HardwareMap;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Encoder;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Localizer;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Matrix;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.MathFunctions;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector;
import org.firstinspires.ftc.teamcode.pedroPathing.util.NanoTimer;
/**
* This is the DriveEncoderLocalizer class. This class extends the Localizer superclass and is a
* localizer that uses the drive encoder set up.
*
* @author Anyi Lin - 10158 Scott's Bots
* @version 1.0, 4/2/2024
*/
@Config
public class DriveEncoderLocalizer extends Localizer {
    private HardwareMap hardwareMap;
    // Pose the robot started from; all displacement is measured relative to this.
    private Pose startPose;
    // Accumulated movement since the start pose (or since the last setPose()).
    private Pose displacementPose;
    private Pose currentVelocity;
    // Rotation matrix for the heading at the previous update, used by the pose exponential.
    private Matrix prevRotationMatrix;
    private NanoTimer timer;
    private long deltaTimeNano;
    private Encoder leftFront;
    private Encoder rightFront;
    private Encoder leftRear;
    private Encoder rightRear;
    private double totalHeading;
    public static double FORWARD_TICKS_TO_INCHES = 1;
    public static double STRAFE_TICKS_TO_INCHES = 1;
    public static double TURN_TICKS_TO_RADIANS = 1;
    public static double ROBOT_WIDTH = 1;
    public static double ROBOT_LENGTH = 1;

    /**
     * This creates a new DriveEncoderLocalizer from a HardwareMap, with a starting Pose at (0,0)
     * facing 0 heading.
     *
     * @param map the HardwareMap
     */
    public DriveEncoderLocalizer(HardwareMap map) {
        this(map, new Pose());
    }

    /**
     * This creates a new DriveEncoderLocalizer from a HardwareMap and a Pose, with the Pose
     * specifying the starting pose of the localizer.
     *
     * @param map the HardwareMap
     * @param setStartPose the Pose to start from
     */
    public DriveEncoderLocalizer(HardwareMap map, Pose setStartPose) {
        hardwareMap = map;

        leftFront = new Encoder(hardwareMap.get(DcMotorEx.class, leftFrontMotorName));
        leftRear = new Encoder(hardwareMap.get(DcMotorEx.class, leftRearMotorName));
        rightRear = new Encoder(hardwareMap.get(DcMotorEx.class, rightRearMotorName));
        rightFront = new Encoder(hardwareMap.get(DcMotorEx.class, rightFrontMotorName));

        // TODO: reverse any encoders necessary
        leftFront.setDirection(Encoder.REVERSE);
        leftRear.setDirection(Encoder.REVERSE);
        rightFront.setDirection(Encoder.FORWARD);
        rightRear.setDirection(Encoder.FORWARD);

        setStartPose(setStartPose);
        timer = new NanoTimer();
        deltaTimeNano = 1;
        displacementPose = new Pose();
        currentVelocity = new Pose();
    }

    /**
     * This returns the current pose estimate.
     *
     * @return returns the current pose estimate as a Pose
     */
    @Override
    public Pose getPose() {
        return MathFunctions.addPoses(startPose, displacementPose);
    }

    /**
     * This returns the current velocity estimate.
     *
     * @return returns the current velocity estimate as a Pose
     */
    @Override
    public Pose getVelocity() {
        return currentVelocity.copy();
    }

    /**
     * This returns the current velocity estimate.
     *
     * @return returns the current velocity estimate as a Vector
     */
    @Override
    public Vector getVelocityVector() {
        return currentVelocity.getVector();
    }

    /**
     * This sets the start pose. Changing the start pose should move the robot as if all its
     * previous movements were displacing it from its new start pose.
     *
     * @param setStart the new start pose
     */
    @Override
    public void setStartPose(Pose setStart) {
        startPose = setStart;
    }

    /**
     * This sets the Matrix that contains the previous pose's heading rotation.
     *
     * @param heading the heading (radians) to build the rotation Matrix from
     */
    public void setPrevRotationMatrix(double heading) {
        prevRotationMatrix = new Matrix(3, 3);
        prevRotationMatrix.set(0, 0, Math.cos(heading));
        prevRotationMatrix.set(0, 1, -Math.sin(heading));
        prevRotationMatrix.set(1, 0, Math.sin(heading));
        prevRotationMatrix.set(1, 1, Math.cos(heading));
        prevRotationMatrix.set(2, 2, 1.0);
    }

    /**
     * This sets the current pose estimate. Changing this should just change the robot's current
     * pose estimate, not anything to do with the start pose.
     *
     * @param setPose the new current pose estimate
     */
    @Override
    public void setPose(Pose setPose) {
        displacementPose = MathFunctions.subtractPoses(setPose, startPose);
        resetEncoders();
    }

    /**
     * This updates the elapsed time timer that keeps track of time between updates, as well as the
     * change position of the Encoders. Then, the robot's global change in position is calculated
     * using the pose exponential method.
     */
    @Override
    public void update() {
        deltaTimeNano = timer.getElapsedTime();
        timer.resetTimer();

        updateEncoders();
        Matrix robotDeltas = getRobotDeltas();
        Matrix globalDeltas;
        setPrevRotationMatrix(getPose().getHeading());

        Matrix transformation = new Matrix(3, 3);
        if (Math.abs(robotDeltas.get(2, 0)) < 0.001) {
            // Near-zero turn: use the Taylor-series approximation to avoid dividing by ~0.
            transformation.set(0, 0, 1.0 - (Math.pow(robotDeltas.get(2, 0), 2) / 6.0));
            transformation.set(0, 1, -robotDeltas.get(2, 0) / 2.0);
            transformation.set(1, 0, robotDeltas.get(2, 0) / 2.0);
            transformation.set(1, 1, 1.0 - (Math.pow(robotDeltas.get(2, 0), 2) / 6.0));
            transformation.set(2, 2, 1.0);
        } else {
            // Standard pose-exponential transform for a finite turn.
            transformation.set(0, 0, Math.sin(robotDeltas.get(2, 0)) / robotDeltas.get(2, 0));
            transformation.set(0, 1, (Math.cos(robotDeltas.get(2, 0)) - 1.0) / robotDeltas.get(2, 0));
            transformation.set(1, 0, (1.0 - Math.cos(robotDeltas.get(2, 0))) / robotDeltas.get(2, 0));
            transformation.set(1, 1, Math.sin(robotDeltas.get(2, 0)) / robotDeltas.get(2, 0));
            transformation.set(2, 2, 1.0);
        }

        globalDeltas = Matrix.multiply(Matrix.multiply(prevRotationMatrix, transformation), robotDeltas);

        displacementPose.add(new Pose(globalDeltas.get(0, 0), globalDeltas.get(1, 0), globalDeltas.get(2, 0)));
        currentVelocity = new Pose(globalDeltas.get(0, 0) / (deltaTimeNano / Math.pow(10.0, 9)), globalDeltas.get(1, 0) / (deltaTimeNano / Math.pow(10.0, 9)), globalDeltas.get(2, 0) / (deltaTimeNano / Math.pow(10.0, 9)));

        totalHeading += globalDeltas.get(2, 0);
    }

    /**
     * This updates the Encoders.
     */
    public void updateEncoders() {
        leftFront.update();
        rightFront.update();
        leftRear.update();
        rightRear.update();
    }

    /**
     * This resets the Encoders.
     */
    public void resetEncoders() {
        leftFront.reset();
        rightFront.reset();
        leftRear.reset();
        rightRear.reset();
    }

    /**
     * This calculates the change in position from the perspective of the robot using information
     * from the Encoders.
     *
     * @return returns a Matrix containing the robot relative movement.
     */
    public Matrix getRobotDeltas() {
        Matrix returnMatrix = new Matrix(3, 1);
        // x/forward movement
        returnMatrix.set(0, 0, FORWARD_TICKS_TO_INCHES * (leftFront.getDeltaPosition() + rightFront.getDeltaPosition() + leftRear.getDeltaPosition() + rightRear.getDeltaPosition()));
        // y/strafe movement
        returnMatrix.set(1, 0, STRAFE_TICKS_TO_INCHES * (-leftFront.getDeltaPosition() + rightFront.getDeltaPosition() + leftRear.getDeltaPosition() - rightRear.getDeltaPosition()));
        // theta/turning
        returnMatrix.set(2, 0, TURN_TICKS_TO_RADIANS * ((-leftFront.getDeltaPosition() + rightFront.getDeltaPosition() - leftRear.getDeltaPosition() + rightRear.getDeltaPosition()) / (ROBOT_WIDTH + ROBOT_LENGTH)));
        return returnMatrix;
    }

    /**
     * This returns how far the robot has turned in radians, in a number not clamped between 0 and
     * 2 * pi radians. This is used for some tuning things and nothing actually within the following.
     *
     * @return returns how far the robot has turned in total, in radians.
     */
    public double getTotalHeading() {
        return totalHeading;
    }

    /**
     * This returns the multiplier applied to forward movement measurement to convert from encoder
     * ticks to inches. This is found empirically through a tuner.
     *
     * @return returns the forward ticks to inches multiplier
     */
    public double getForwardMultiplier() {
        return FORWARD_TICKS_TO_INCHES;
    }

    /**
     * This returns the multiplier applied to lateral/strafe movement measurement to convert from
     * encoder ticks to inches. This is found empirically through a tuner.
     *
     * @return returns the lateral/strafe ticks to inches multiplier
     */
    public double getLateralMultiplier() {
        return STRAFE_TICKS_TO_INCHES;
    }

    /**
     * This returns the multiplier applied to turning movement measurement to convert from encoder
     * ticks to radians. This is found empirically through a tuner.
     *
     * @return returns the turning ticks to radians multiplier
     */
    public double getTurningMultiplier() {
        return TURN_TICKS_TO_RADIANS;
    }

    /**
     * This does nothing since this localizer does not use the IMU.
     */
    @Override  // implements Localizer's abstract resetIMU()
    public void resetIMU() {
    }
}

View File

@@ -0,0 +1,227 @@
package org.firstinspires.ftc.teamcode.pedroPathing.localization.localizers;
import com.qualcomm.hardware.sparkfun.SparkFunOTOS;
import com.qualcomm.robotcore.hardware.HardwareMap;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.robotcore.external.navigation.DistanceUnit;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Localizer;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.MathFunctions;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector;
/**
* This is the OTOSLocalizer class. This class extends the Localizer superclass and is a
* localizer that uses the SparkFun OTOS. The diagram below, which is modified from
* Road Runner, shows a typical set up.
*
* The view is from the top of the robot looking downwards.
*
* left on robot is the y positive direction
*
* forward on robot is the x positive direction
*
* forward (x positive)
* △
* |
* |
* /--------------\
* | |
* | |
* | || || |
* left (y positive) <--- | || || |
* | ____ |
* | ---- |
* \--------------/
*
* @author Anyi Lin - 10158 Scott's Bots
* @version 1.0, 7/20/2024
*/
public class OTOSLocalizer extends Localizer {
    private HardwareMap hardwareMap;
    private Pose startPose;
    private SparkFunOTOS otos;
    // Latest pose/velocity/acceleration snapshots refreshed by update().
    private SparkFunOTOS.Pose2D otosPose;
    private SparkFunOTOS.Pose2D otosVel;
    private SparkFunOTOS.Pose2D otosAcc;
    private double previousHeading;
    private double totalHeading;

    /**
     * This creates a new OTOSLocalizer from a HardwareMap, with a starting Pose at (0,0)
     * facing 0 heading.
     *
     * @param map the HardwareMap
     */
    public OTOSLocalizer(HardwareMap map) {
        this(map, new Pose());
    }

    /**
     * This creates a new OTOSLocalizer from a HardwareMap and a Pose, with the Pose
     * specifying the starting pose of the localizer.
     *
     * @param map the HardwareMap
     * @param setStartPose the Pose to start from
     */
    public OTOSLocalizer(HardwareMap map, Pose setStartPose) {
        hardwareMap = map;

        /*
        TODO: If you want to use the "SparkFunOTOSCorrected" version of OTOS, then replace the
        'SparkFunOTOS.class' below with 'SparkFunOTOSCorrected.class' and set the OTOS as a
        "SparkFunOTOS Corrected" in your robot config
        */
        // TODO: replace this with your OTOS port
        otos = hardwareMap.get(SparkFunOTOS.class, "sensor_otos");

        otos.setLinearUnit(DistanceUnit.INCH);
        otos.setAngularUnit(AngleUnit.RADIANS);

        // TODO: replace this with your OTOS offset from the center of the robot
        // For the OTOS, left/right is the y axis and forward/backward is the x axis, with left being
        // positive y and forward being positive x. PI/2 radians is facing forward, and clockwise
        // rotation is negative rotation.
        otos.setOffset(new SparkFunOTOS.Pose2D(0, 0, Math.PI / 2));

        // TODO: replace these with your tuned multipliers
        otos.setLinearScalar(1.0);
        otos.setAngularScalar(1.0);

        otos.calibrateImu();
        otos.resetTracking();

        setStartPose(setStartPose);
        otosPose = new SparkFunOTOS.Pose2D();
        otosVel = new SparkFunOTOS.Pose2D();
        otosAcc = new SparkFunOTOS.Pose2D();
        totalHeading = 0;
        previousHeading = startPose.getHeading();

        resetOTOS();
    }

    /**
     * This returns the current pose estimate.
     *
     * @return returns the current pose estimate as a Pose
     */
    @Override
    public Pose getPose() {
        // Rotate the sensor-frame displacement into the start pose's frame, then offset by it.
        Pose pose = new Pose(otosPose.x, otosPose.y, otosPose.h);
        Vector vec = pose.getVector();
        vec.rotateVector(startPose.getHeading());
        return MathFunctions.addPoses(startPose, new Pose(vec.getXComponent(), vec.getYComponent(), pose.getHeading()));
    }

    /**
     * This returns the current velocity estimate.
     *
     * @return returns the current velocity estimate as a Pose
     */
    @Override
    public Pose getVelocity() {
        return new Pose(otosVel.x, otosVel.y, otosVel.h);
    }

    /**
     * This returns the current velocity estimate.
     *
     * @return returns the current velocity estimate as a Vector
     */
    @Override
    public Vector getVelocityVector() {
        return getVelocity().getVector();
    }

    /**
     * This sets the start pose. Changing the start pose should move the robot as if all its
     * previous movements were displacing it from its new start pose.
     *
     * @param setStart the new start pose
     */
    @Override
    public void setStartPose(Pose setStart) {
        startPose = setStart;
    }

    /**
     * This sets the current pose estimate. Changing this should just change the robot's current
     * pose estimate, not anything to do with the start pose.
     *
     * @param setPose the new current pose estimate
     */
    @Override
    public void setPose(Pose setPose) {
        resetOTOS();
        // The OTOS tracks relative to the start pose, so store the difference on the sensor.
        Pose setOTOSPose = MathFunctions.subtractPoses(setPose, startPose);
        otos.setPosition(new SparkFunOTOS.Pose2D(setOTOSPose.getX(), setOTOSPose.getY(), setOTOSPose.getHeading()));
    }

    /**
     * This updates the total heading of the robot. The OTOS handles all other updates itself.
     */
    @Override
    public void update() {
        otos.getPosVelAcc(otosPose, otosVel, otosAcc);
        totalHeading += MathFunctions.getSmallestAngleDifference(otosPose.h, previousHeading);
        previousHeading = otosPose.h;
    }

    /**
     * This resets the OTOS.
     */
    public void resetOTOS() {
        otos.resetTracking();
    }

    /**
     * This returns how far the robot has turned in radians, in a number not clamped between 0 and
     * 2 * pi radians. This is used for some tuning things and nothing actually within the following.
     *
     * @return returns how far the robot has turned in total, in radians.
     */
    public double getTotalHeading() {
        return totalHeading;
    }

    /**
     * This returns the multiplier applied to forward movement measurement to convert from OTOS
     * ticks to inches. For the OTOS, this value is the same as the lateral multiplier.
     * This is found empirically through a tuner.
     *
     * @return returns the forward ticks to inches multiplier
     */
    public double getForwardMultiplier() {
        return otos.getLinearScalar();
    }

    /**
     * This returns the multiplier applied to lateral/strafe movement measurement to convert from
     * OTOS ticks to inches. For the OTOS, this value is the same as the forward multiplier.
     * This is found empirically through a tuner.
     *
     * @return returns the lateral/strafe ticks to inches multiplier
     */
    public double getLateralMultiplier() {
        return otos.getLinearScalar();
    }

    /**
     * This returns the multiplier applied to turning movement measurement to convert from OTOS ticks
     * to radians. This is found empirically through a tuner.
     *
     * @return returns the turning ticks to radians multiplier
     */
    public double getTurningMultiplier() {
        return otos.getAngularScalar();
    }

    /**
     * This does nothing since this localizer does not use the IMU.
     */
    @Override  // implements Localizer's abstract resetIMU()
    public void resetIMU() {
    }
}

View File

@@ -0,0 +1,245 @@
package org.firstinspires.ftc.teamcode.pedroPathing.localization.localizers;
import com.qualcomm.robotcore.hardware.HardwareMap;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.robotcore.external.navigation.DistanceUnit;
import org.firstinspires.ftc.robotcore.external.navigation.Pose2D;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.GoBildaPinpointDriver;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Localizer;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.MathFunctions;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector;
import org.firstinspires.ftc.teamcode.pedroPathing.util.NanoTimer;
import org.opencv.core.Mat;
/**
* This is the Pinpoint class. This class extends the Localizer superclass and is a
* localizer that uses the two wheel odometry set up with the IMU to have more accurate heading
* readings. The diagram below, which is modified from Road Runner, shows a typical set up.
*
* The view is from the top of the robot looking downwards.
*
* left on robot is the y positive direction
*
* forward on robot is the x positive direction
*
* /--------------\
* | ____ |
* | ---- |
* | || |
* | || | ----> left (y positive)
* | |
* | |
* \--------------/
* |
* |
* V
* forward (x positive)
* The Pinpoint reports its readings in millimeters;
* to convert a millimeter value to inches, divide it by 25.4.
* @author Logan Nash
* @author Havish Sripada 12808 - RevAmped Robotics
* @author Ethan Doak - Gobilda
* @version 1.0, 10/2/2024
*/
public class PinpointLocalizer extends Localizer {
    /** Time to wait after a recalibration/reset so the Pinpoint's IMU can settle. */
    private static final long CALIBRATION_SETTLE_TIME_MS = 300;
    /** Nanoseconds per second, for converting loop times into seconds. */
    private static final double NANOS_PER_SECOND = 1e9;

    private HardwareMap hardwareMap;
    private GoBildaPinpointDriver odo;
    // Heading reported on the previous update; used to accumulate totalHeading.
    private double previousHeading;
    // Accumulated signed rotation in radians, not wrapped to [0, 2*pi).
    private double totalHeading;
    // Field-frame pose the localizer starts from; sensor poses are offset by this.
    private Pose startPose;
    // Elapsed time between the last two update() calls, in nanoseconds.
    private long deltaTimeNano;
    private NanoTimer timer;
    // Velocity estimate computed by update() from successive sensor poses.
    private Pose currentVelocity;
    // Last pose reported by the Pinpoint, in the sensor's own frame.
    private Pose previousPinpointPose;

    /**
     * This creates a new PinpointLocalizer from a HardwareMap, with a starting Pose at (0,0)
     * facing 0 heading.
     *
     * @param map the HardwareMap
     */
    public PinpointLocalizer(HardwareMap map) {
        this(map, new Pose());
    }

    /**
     * This creates a new PinpointLocalizer from a HardwareMap and a Pose, with the Pose
     * specifying the starting pose of the localizer.
     *
     * @param map the HardwareMap
     * @param setStartPose the Pose to start from
     */
    public PinpointLocalizer(HardwareMap map, Pose setStartPose) {
        hardwareMap = map;
        odo = hardwareMap.get(GoBildaPinpointDriver.class, "pinpoint");

        // The default units are inches, but you can swap the units if you wish.
        // If you have already tuned the TwoWheelLocalizer, you can simply use the
        // forwardEncoderPose's y value and strafeEncoderPose's x value here.
        setOffsets(-2.815, 0.125, DistanceUnit.INCH); // tuned for 3110-0002-0001 Product Insight #1

        // TODO: tune the yaw scalar yourself if needed.
        // odo.setYawScalar(1.0);

        odo.setEncoderResolution(GoBildaPinpointDriver.GoBildaOdometryPods.goBILDA_4_BAR_POD);
        //odo.setEncoderResolution(13.26291192);

        odo.setEncoderDirections(GoBildaPinpointDriver.EncoderDirection.REVERSED, GoBildaPinpointDriver.EncoderDirection.FORWARD);

        resetPinpoint();
        setStartPose(setStartPose);
        totalHeading = 0;
        timer = new NanoTimer();
        previousPinpointPose = new Pose();
        currentVelocity = new Pose();
        deltaTimeNano = 1; // non-zero so the first velocity computation cannot divide by 0
        previousHeading = setStartPose.getHeading();
    }

    /**
     * This returns the current pose estimate: the sensor-frame pose rotated into the
     * start pose's frame, then offset by the start pose.
     *
     * @return returns the current pose estimate as a Pose
     */
    @Override
    public Pose getPose() {
        return MathFunctions.addPoses(startPose, MathFunctions.rotatePose(previousPinpointPose, startPose.getHeading(), false));
    }

    /**
     * This returns the current velocity estimate.
     *
     * @return returns the current velocity estimate as a Pose
     */
    @Override
    public Pose getVelocity() {
        // Copy so callers cannot mutate the localizer's internal state.
        return currentVelocity.copy();
    }

    /**
     * This returns the current velocity estimate.
     *
     * @return returns the current velocity estimate as a Vector
     */
    @Override
    public Vector getVelocityVector() {
        return currentVelocity.getVector();
    }

    /**
     * This sets the start pose. Since nobody should be using this after the robot has begun moving,
     * and due to issues with the PinpointLocalizer, this is functionally the same as setPose(Pose).
     *
     * @param setStart the new start pose
     */
    @Override
    public void setStartPose(Pose setStart) {
        this.startPose = setStart;
    }

    /**
     * This sets the current pose estimate. Changing this should just change the robot's current
     * pose estimate, not anything to do with the start pose.
     *
     * @param setPose the new current pose estimate
     */
    @Override
    public void setPose(Pose setPose) {
        // The Pinpoint tracks relative to the start pose, so push the start-relative pose down.
        Pose setNewPose = MathFunctions.subtractPoses(setPose, startPose);
        odo.setPosition(new Pose2D(DistanceUnit.INCH, setNewPose.getX(), setNewPose.getY(), AngleUnit.RADIANS, setNewPose.getHeading()));
    }

    /**
     * This updates the total heading and the velocity estimate of the robot. The Pinpoint
     * handles all position updates itself.
     */
    @Override
    public void update() {
        deltaTimeNano = timer.getElapsedTime();
        timer.resetTimer();

        odo.update();
        Pose2D pinpointPose = odo.getPosition();
        Pose currentPinpointPose = new Pose(pinpointPose.getX(DistanceUnit.INCH), pinpointPose.getY(DistanceUnit.INCH), pinpointPose.getHeading(AngleUnit.RADIANS));

        // Accumulate SIGNED rotation: getSmallestAngleDifference alone is unsigned, so
        // multiply by the turn direction, matching the other localizers in this package.
        totalHeading += MathFunctions.getTurnDirection(previousHeading, currentPinpointPose.getHeading())
                * MathFunctions.getSmallestAngleDifference(currentPinpointPose.getHeading(), previousHeading);
        previousHeading = currentPinpointPose.getHeading();

        Pose deltaPose = MathFunctions.subtractPoses(currentPinpointPose, previousPinpointPose);
        double loopTimeSeconds = deltaTimeNano / NANOS_PER_SECOND;
        currentVelocity = new Pose(deltaPose.getX() / loopTimeSeconds, deltaPose.getY() / loopTimeSeconds, deltaPose.getHeading() / loopTimeSeconds);

        previousPinpointPose = currentPinpointPose;
    }

    /**
     * This returns how far the robot has turned in radians, in a number not clamped between 0 and
     * 2 * pi radians. This is used for some tuning things and nothing actually within the following.
     *
     * @return returns how far the robot has turned in total, in radians.
     */
    @Override
    public double getTotalHeading() {
        return totalHeading;
    }

    /**
     * This returns the Y encoder value as none of the odometry tuners are required for this localizer.
     *
     * @return returns the Y encoder value
     */
    @Override
    public double getForwardMultiplier() {
        return odo.getEncoderY();
    }

    /**
     * This returns the X encoder value as none of the odometry tuners are required for this localizer.
     *
     * @return returns the X encoder value
     */
    @Override
    public double getLateralMultiplier() {
        return odo.getEncoderX();
    }

    /**
     * This returns either the factory tuned yaw scalar or the yaw scalar tuned by yourself.
     *
     * @return returns the yaw scalar
     */
    @Override
    public double getTurningMultiplier() {
        return odo.getYawScalar();
    }

    /**
     * This sets the offsets and converts the given unit to millimeters for the Pinpoint.
     *
     * @param xOffset How far to the side from the center of the robot is the x-pod? Use positive values if it's to the left and negative if it's to the right.
     * @param yOffset How far forward from the center of the robot is the y-pod? Use positive values if it's forward and negative if it's to the back.
     * @param unit The units that the measurements are given in
     */
    private void setOffsets(double xOffset, double yOffset, DistanceUnit unit) {
        odo.setOffsets(unit.toMm(xOffset), unit.toMm(yOffset));
    }

    /**
     * This resets the IMU. Does not change heading estimation.
     *
     * @throws InterruptedException declared for interface compatibility; an interruption
     *         during the settle wait is surfaced as a RuntimeException with the interrupt
     *         flag restored.
     */
    @Override
    public void resetIMU() throws InterruptedException {
        odo.recalibrateIMU();
        settle();
    }

    /**
     * This resets the pinpoint's position and IMU.
     */
    private void resetPinpoint() {
        odo.resetPosAndIMU();
        settle();
    }

    /**
     * Sleeps briefly so the Pinpoint can finish a calibration/reset before it is used again.
     * If the wait is interrupted, the thread's interrupt flag is restored (so callers can
     * still observe the interruption) before the exception is rethrown.
     */
    private void settle() {
        try {
            Thread.sleep(CALIBRATION_SETTLE_TIME_MS);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            throw new RuntimeException(e);
        }
    }
}

View File

@ -0,0 +1,159 @@
//package org.firstinspires.ftc.teamcode.pedroPathing.localization;
//
//import com.acmerobotics.roadrunner.geometry.Pose2d;
//import com.qualcomm.robotcore.hardware.HardwareMap;
//
//import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.MathFunctions;
//import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector;
//
//import java.util.ArrayList;
//import java.util.List;
//
///**
// * This is the RRToPedroThreeWheelLocalizer class. This class extends the Localizer superclass and
// * is intended to adapt the old Road Runner three wheel odometry localizer to the new Pedro Pathing
// * localizer system.
// *
// * @author Anyi Lin - 10158 Scott's Bots
// * @version 1.0, 5/9/2024
// */
//public class RRToPedroThreeWheelLocalizer extends Localizer {
// private RoadRunnerThreeWheelLocalizer localizer;
// private double totalHeading;
// private Pose startPose;
// private Pose previousPose;
//
// /**
// * This creates a new RRToPedroThreeWheelLocalizer from a HardwareMap. This adapts the previously
// * used Road Runner localization system to the new Pedro Pathing localization system.
// *
// * @param hardwareMap the HardwareMap
// */
// public RRToPedroThreeWheelLocalizer(HardwareMap hardwareMap) {
// List<Integer> lastTrackingEncPositions = new ArrayList<>();
// List<Integer> lastTrackingEncVels = new ArrayList<>();
//
// localizer = new RoadRunnerThreeWheelLocalizer(hardwareMap, lastTrackingEncPositions, lastTrackingEncVels);
//
// startPose = new Pose();
// previousPose = new Pose();
// }
//
// /**
// * This returns the current pose estimate as a Pose.
// *
// * @return returns the current pose estimate
// */
// @Override
// public Pose getPose() {
// Pose2d pose = localizer.getPoseEstimate();
// return new Pose(pose.getX(), pose.getY(), pose.getHeading());
// }
//
// /**
// * This returns the current velocity estimate as a Pose.
// *
// * @return returns the current velocity estimate
// */
// @Override
// public Pose getVelocity() {
// Pose2d pose = localizer.getPoseVelocity();
// return new Pose(pose.getX(), pose.getY(), pose.getHeading());
// }
//
// /**
// * This returns the current velocity estimate as a Vector.
// *
// * @return returns the current velocity estimate
// */
// @Override
// public Vector getVelocityVector() {
// Pose2d pose = localizer.getPoseVelocity();
// Vector returnVector = new Vector();
// returnVector.setOrthogonalComponents(pose.getX(), pose.getY());
// return returnVector;
// }
//
// /**
// * This sets the start pose. Any movement of the robot is treated as a displacement from the
// * start pose, so moving the start pose will move the current pose estimate the same amount.
// *
// * @param setStart the new start pose
// */
// @Override
// public void setStartPose(Pose setStart) {
// Pose oldStart = startPose;
// startPose = setStart;
// Pose startDiff = MathFunctions.subtractPoses(startPose, oldStart);
// localizer.setPoseEstimate(new Pose2d(getPose().getX() + startDiff.getX(), getPose().getY() + startDiff.getY(), getPose().getHeading() + startDiff.getHeading()));
// }
//
// /**
// * This sets the current pose estimate. This has no effect on the start pose.
// *
// * @param setPose the new current pose estimate
// */
// @Override
// public void setPose(Pose setPose) {
// localizer.setPoseEstimate(new Pose2d(setPose.getX(), setPose.getY(), setPose.getHeading()));
// }
//
// /**
// * This updates the total heading and previous pose estimate. Everything else is handled by the
// * Road Runner localizer on its own, but updating this tells you how far the robot has really
// * turned.
// */
// @Override
// public void update() {
// totalHeading += MathFunctions.getTurnDirection(previousPose.getHeading(), getPose().getHeading()) * MathFunctions.getSmallestAngleDifference(previousPose.getHeading(), getPose().getHeading());
// previousPose = getPose();
// }
//
// /**
// * This returns how far the robot has actually turned.
// *
// * @return returns the total angle turned, in degrees.
// */
// @Override
// public double getTotalHeading() {
// return totalHeading;
// }
//
// /**
// * This returns the forward multiplier of the Road Runner localizer, which converts from ticks
// * to inches. You can actually use the tuners in Pedro Pathing to find the value that everything
// * multiplied together should be. If you do use that, then do be aware that the value returned is
// * the product of the Road Runner ticks to inches and the x multiplier.
// *
// * @return returns the forward multiplier
// */
// @Override
// public double getForwardMultiplier() {
// return RoadRunnerThreeWheelLocalizer.encoderTicksToInches(1) * RoadRunnerThreeWheelLocalizer.X_MULTIPLIER;
// }
//
// /**
// * This returns the lateral multiplier of the Road Runner localizer, which converts from ticks
// * to inches. You can actually use the tuners in Pedro Pathing to find the value that everything
// * multiplied together should be. If you do use that, then do be aware that the value returned is
// * the product of the Road Runner ticks to inches and the y multiplier.
// *
// * @return returns the lateral multiplier
// */
// @Override
// public double getLateralMultiplier() {
// return RoadRunnerThreeWheelLocalizer.encoderTicksToInches(1) * RoadRunnerThreeWheelLocalizer.Y_MULTIPLIER;
// }
//
// /**
// * This returns the turning multiplier of the Road Runner localizer, which doesn't actually exist.
// * There really isn't a point in tuning the turning for the Road Runner localizer. This will
// * actually just return the average of the two other multipliers.
// *
// * @return returns the turning multiplier
// */
// @Override
// public double getTurningMultiplier() {
// return (getForwardMultiplier() + getLateralMultiplier()) / 2;
// }
//}

View File

@ -0,0 +1,132 @@
//package org.firstinspires.ftc.teamcode.pedroPathing.localization;
//
//import com.acmerobotics.roadrunner.util.NanoClock;
//import com.qualcomm.robotcore.hardware.DcMotorEx;
//import com.qualcomm.robotcore.hardware.DcMotorSimple;
//
///**
// * This class is adapted from the Road Runner Encoder class. Later, this will be replaced with a
// * custom encoder class. According to Road Runner, this wraps a motor instance to provide corrected
// * velocity counts and allow reversing independently of the corresponding slot's motor direction.
// *
// * I'm fairly sure I didn't make any changes to this class, just copied it so I wouldn't have to have
// * import statements, so I'm not crediting myself as an author for this.
// *
// * @author Road Runner dev team
// * @version 1.0, 5/9/2024
// */
//public class RoadRunnerEncoder {
// private final static int CPS_STEP = 0x10000;
//
// private static double inverseOverflow(double input, double estimate) {
// // convert to uint16
// int real = (int) input & 0xffff;
// // initial, modulo-based correction: it can recover the remainder of 5 of the upper 16 bits
// // because the velocity is always a multiple of 20 cps due to Expansion Hub's 50ms measurement window
// real += ((real % 20) / 4) * CPS_STEP;
// // estimate-based correction: it finds the nearest multiple of 5 to correct the upper bits by
// real += Math.round((estimate - real) / (5 * CPS_STEP)) * 5 * CPS_STEP;
// return real;
// }
//
// public enum Direction {
// FORWARD(1),
// REVERSE(-1);
//
// private int multiplier;
//
// Direction(int multiplier) {
// this.multiplier = multiplier;
// }
//
// public int getMultiplier() {
// return multiplier;
// }
// }
//
// private DcMotorEx motor;
// private NanoClock clock;
//
// private Direction direction;
//
// private int lastPosition;
// private int velocityEstimateIdx;
// private double[] velocityEstimates;
// private double lastUpdateTime;
//
// public RoadRunnerEncoder(DcMotorEx motor, NanoClock clock) {
// this.motor = motor;
// this.clock = clock;
//
// this.direction = Direction.FORWARD;
//
// this.lastPosition = 0;
// this.velocityEstimates = new double[3];
// this.lastUpdateTime = clock.seconds();
// }
//
// public RoadRunnerEncoder(DcMotorEx motor) {
// this(motor, NanoClock.system());
// }
//
// public Direction getDirection() {
// return direction;
// }
//
// private int getMultiplier() {
// return getDirection().getMultiplier() * (motor.getDirection() == DcMotorSimple.Direction.FORWARD ? 1 : -1);
// }
//
// /**
// * Allows you to set the direction of the counts and velocity without modifying the motor's direction state
// * @param direction either reverse or forward depending on if encoder counts should be negated
// */
// public void setDirection(Direction direction) {
// this.direction = direction;
// }
//
// /**
// * Gets the position from the underlying motor and adjusts for the set direction.
// * Additionally, this method updates the velocity estimates used for compensated velocity
// *
// * @return encoder position
// */
// public int getCurrentPosition() {
// int multiplier = getMultiplier();
// int currentPosition = motor.getCurrentPosition() * multiplier;
// if (currentPosition != lastPosition) {
// double currentTime = clock.seconds();
// double dt = currentTime - lastUpdateTime;
// velocityEstimates[velocityEstimateIdx] = (currentPosition - lastPosition) / dt;
// velocityEstimateIdx = (velocityEstimateIdx + 1) % 3;
// lastPosition = currentPosition;
// lastUpdateTime = currentTime;
// }
// return currentPosition;
// }
//
// /**
// * Gets the velocity directly from the underlying motor and compensates for the direction
// * See {@link #getCorrectedVelocity} for high (>2^15) counts per second velocities (such as on REV Through Bore)
// *
// * @return raw velocity
// */
// public double getRawVelocity() {
// int multiplier = getMultiplier();
// return motor.getVelocity() * multiplier;
// }
//
// /**
// * Uses velocity estimates gathered in {@link #getCurrentPosition} to estimate the upper bits of velocity
// * that are lost in overflow due to velocity being transmitted as 16 bits.
// * CAVEAT: must regularly call {@link #getCurrentPosition} for the compensation to work correctly.
// *
// * @return corrected velocity
// */
// public double getCorrectedVelocity() {
// double median = velocityEstimates[0] > velocityEstimates[1]
// ? Math.max(velocityEstimates[1], Math.min(velocityEstimates[0], velocityEstimates[2]))
// : Math.max(velocityEstimates[0], Math.min(velocityEstimates[1], velocityEstimates[2]));
// return inverseOverflow(getRawVelocity(), median);
// }
//}

View File

@ -0,0 +1,123 @@
//package org.firstinspires.ftc.teamcode.pedroPathing.localization;
//
//import androidx.annotation.NonNull;
//
//import com.acmerobotics.dashboard.config.Config;
//import com.acmerobotics.roadrunner.geometry.Pose2d;
//import com.acmerobotics.roadrunner.localization.ThreeTrackingWheelLocalizer;
//import com.qualcomm.robotcore.hardware.DcMotorEx;
//import com.qualcomm.robotcore.hardware.HardwareMap;
//
//import java.util.Arrays;
//import java.util.List;
//
///*
// * Sample tracking wheel localizer implementation assuming the standard configuration:
// *
// * left on robot is y pos
// *
// * front of robot is x pos
// *
// * /--------------\
// * | ____ |
// * | ---- |
// * | || || |
// * | || || |
// * | |
// * | |
// * \--------------/
// *
// */
//
///**
// * This class is adapted from the Road Runner StandardTrackingWheelLocalizer class. Later, this will
// * be replaced with a custom localizer. I made some minor changes, so I'm crediting myself as an
// * 'author' of sorts, but really this is pretty much Road Runner's code, just moved to be local to
// * Pedro Pathing to avoid having imports.
// *
// * @author Road Runner dev team
// * @author Anyi Lin - 10158 Scott's Bots
// * @version 1.0, 5/9/2024
// */
//@Config
//public class RoadRunnerThreeWheelLocalizer extends ThreeTrackingWheelLocalizer {
// public static double TICKS_PER_REV = 8192;
// public static double WHEEL_RADIUS = 1.37795; // in
// public static double GEAR_RATIO = 1; // output (wheel) speed / input (encoder) speed
//
// public static double X_MULTIPLIER = 0.5008239963;
// public static double Y_MULTIPLIER = 0.5018874659;
//
// public static double leftX = -18.5/25.4 - 0.1, leftY = 164.4/25.4, rightX = -18.4/25.4 - 0.1, rightY = -159.6/25.4, strafeX = -107.9/25.4+0.25, strafeY = -1.1/25.4-0.23;
//
// private RoadRunnerEncoder leftEncoder, rightEncoder, strafeEncoder;
//
// private List<Integer> lastEncPositions, lastEncVels;
//
// public RoadRunnerThreeWheelLocalizer(HardwareMap hardwareMap, List<Integer> lastTrackingEncPositions, List<Integer> lastTrackingEncVels) {
// super(Arrays.asList(
// new Pose2d(leftX, leftY, 0), // left
// new Pose2d(rightX, rightY, 0), // right
// new Pose2d(strafeX, strafeY, Math.toRadians(90)) // strafe
// ));
//
// lastEncPositions = lastTrackingEncPositions;
// lastEncVels = lastTrackingEncVels;
//
// // TODO: redo the configs here
// leftEncoder = new RoadRunnerEncoder(hardwareMap.get(DcMotorEx.class, "leftRear"));
// rightEncoder = new RoadRunnerEncoder(hardwareMap.get(DcMotorEx.class, "rightFront"));
// strafeEncoder = new RoadRunnerEncoder(hardwareMap.get(DcMotorEx.class, "strafeEncoder"));
//
// // TODO: reverse any encoders using Encoder.setDirection(Encoder.Direction.REVERSE)
// leftEncoder.setDirection(RoadRunnerEncoder.Direction.REVERSE);
// rightEncoder.setDirection(RoadRunnerEncoder.Direction.REVERSE);
// strafeEncoder.setDirection(RoadRunnerEncoder.Direction.FORWARD);
// }
//
// public void resetHeading(double heading) {
// setPoseEstimate(new Pose2d(getPoseEstimate().getX(), getPoseEstimate().getY(), heading));
// }
//
// public static double encoderTicksToInches(double ticks) {
// return WHEEL_RADIUS * 2 * Math.PI * GEAR_RATIO * ticks / TICKS_PER_REV;
// }
//
// @NonNull
// @Override
// public List<Double> getWheelPositions() {
// int leftPos = leftEncoder.getCurrentPosition();
// int rightPos = rightEncoder.getCurrentPosition();
// int frontPos = strafeEncoder.getCurrentPosition();
//
// lastEncPositions.clear();
// lastEncPositions.add(leftPos);
// lastEncPositions.add(rightPos);
// lastEncPositions.add(frontPos);
//
// return Arrays.asList(
// encoderTicksToInches(leftPos) * X_MULTIPLIER,
// encoderTicksToInches(rightPos) * X_MULTIPLIER,
// encoderTicksToInches(frontPos) * Y_MULTIPLIER
// );
// }
//
// @NonNull
// @Override
// public List<Double> getWheelVelocities() {
// int leftVel = (int) leftEncoder.getCorrectedVelocity();
// int rightVel = (int) rightEncoder.getCorrectedVelocity();
// int frontVel = (int) strafeEncoder.getCorrectedVelocity();
//
// lastEncVels.clear();
// lastEncVels.add(leftVel);
// lastEncVels.add(rightVel);
// lastEncVels.add(frontVel);
//
// return Arrays.asList(
// encoderTicksToInches(leftVel) * X_MULTIPLIER,
// encoderTicksToInches(rightVel) * X_MULTIPLIER,
// encoderTicksToInches(frontVel) * Y_MULTIPLIER
// );
// }
//}

View File

@ -0,0 +1,326 @@
package org.firstinspires.ftc.teamcode.pedroPathing.localization.localizers;
import com.acmerobotics.dashboard.config.Config;
import com.qualcomm.hardware.rev.RevHubOrientationOnRobot;
import com.qualcomm.robotcore.hardware.DcMotorEx;
import com.qualcomm.robotcore.hardware.HardwareMap;
import com.qualcomm.robotcore.hardware.IMU;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Encoder;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Localizer;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Matrix;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.MathFunctions;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector;
import org.firstinspires.ftc.teamcode.pedroPathing.util.NanoTimer;
/**
* This is the ThreeWheelIMULocalizer class. This class extends the Localizer superclass and is a
* localizer that uses the three wheel odometry set up with the IMU to have more accurate heading
* readings. The diagram below, which is modified from Road Runner, shows a typical set up.
*
* The view is from the top of the robot looking downwards.
*
* left on robot is the y positive direction
*
* forward on robot is the x positive direction
*
* forward (x positive)
* △
* |
* |
* /--------------\
* | |
* | |
* | || || |
* left (y positive) <--- | || || |
* | ____ |
* | ---- |
* \--------------/
*
* @author Logan Nash
* @author Anyi Lin - 10158 Scott's Bots
* @version 1.0, 7/9/2024
*/
@Config
public class ThreeWheelIMULocalizer extends Localizer {
private HardwareMap hardwareMap;
private Pose startPose;
private Pose displacementPose;
private Pose currentVelocity;
private Matrix prevRotationMatrix;
private NanoTimer timer;
private long deltaTimeNano;
private Encoder leftEncoder;
private Encoder rightEncoder;
private Encoder strafeEncoder;
private Pose leftEncoderPose;
private Pose rightEncoderPose;
private Pose strafeEncoderPose;
public final IMU imu;
private double previousIMUOrientation;
private double deltaRadians;
private double totalHeading;
public static double FORWARD_TICKS_TO_INCHES = 0.002957;//8192 * 1.37795 * 2 * Math.PI * 0.5008239963;
public static double STRAFE_TICKS_TO_INCHES = -0.003127403096038503;//8192 * 1.37795 * 2 * Math.PI * 0.5018874659;
public static double TURN_TICKS_TO_RADIANS = 0.002995;//8192 * 1.37795 * 2 * Math.PI * 0.5;
public static boolean useIMU = true;
/**
* This creates a new ThreeWheelIMULocalizer from a HardwareMap, with a starting Pose at (0,0)
* facing 0 heading.
*
* @param map the HardwareMap
*/
public ThreeWheelIMULocalizer(HardwareMap map) {
this(map, new Pose());
}
/**
* This creates a new ThreeWheelIMULocalizer from a HardwareMap and a Pose, with the Pose
* specifying the starting pose of the localizer.
*
* @param map the HardwareMap
* @param setStartPose the Pose to start from
*/
public ThreeWheelIMULocalizer(HardwareMap map, Pose setStartPose) {
hardwareMap = map;
imu = hardwareMap.get(IMU.class, "imu");
// TODO: replace this with your IMU's orientation
imu.initialize(new IMU.Parameters(new RevHubOrientationOnRobot(RevHubOrientationOnRobot.LogoFacingDirection.LEFT, RevHubOrientationOnRobot.UsbFacingDirection.UP)));
// TODO: replace these with your encoder positions
leftEncoderPose = new Pose(-3, 5.7, 0);
rightEncoderPose = new Pose(-3, -5.7, 0);
strafeEncoderPose = new Pose(6.9, 1, Math.toRadians(90));
// TODO: replace these with your encoder ports
leftEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, "lb"));
rightEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, "lf"));
strafeEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, "rf"));
// TODO: reverse any encoders necessary
leftEncoder.setDirection(Encoder.REVERSE);
rightEncoder.setDirection(Encoder.FORWARD);
strafeEncoder.setDirection(Encoder.FORWARD);
setStartPose(setStartPose);
timer = new NanoTimer();
deltaTimeNano = 1;
displacementPose = new Pose();
currentVelocity = new Pose();
totalHeading = 0;
resetEncoders();
}
/**
* This returns the current pose estimate.
*
* @return returns the current pose estimate as a Pose
*/
@Override
public Pose getPose() {
return MathFunctions.addPoses(startPose, displacementPose);
}
/**
* This returns the current velocity estimate.
*
* @return returns the current velocity estimate as a Pose
*/
@Override
public Pose getVelocity() {
return currentVelocity.copy();
}
/**
* This returns the current velocity estimate.
*
* @return returns the current velocity estimate as a Vector
*/
@Override
public Vector getVelocityVector() {
return currentVelocity.getVector();
}
/**
* This sets the start pose. Changing the start pose should move the robot as if all its
* previous movements were displacing it from its new start pose.
*
* @param setStart the new start pose
*/
@Override
public void setStartPose(Pose setStart) {
startPose = setStart;
}
/**
* This sets the Matrix that contains the previous pose's heading rotation.
*
* @param heading the rotation of the Matrix
*/
public void setPrevRotationMatrix(double heading) {
prevRotationMatrix = new Matrix(3,3);
prevRotationMatrix.set(0, 0, Math.cos(heading));
prevRotationMatrix.set(0, 1, -Math.sin(heading));
prevRotationMatrix.set(1, 0, Math.sin(heading));
prevRotationMatrix.set(1, 1, Math.cos(heading));
prevRotationMatrix.set(2, 2, 1.0);
}
/**
* This sets the current pose estimate. Changing this should just change the robot's current
* pose estimate, not anything to do with the start pose.
*
* @param setPose the new current pose estimate
*/
@Override
public void setPose(Pose setPose) {
displacementPose = MathFunctions.subtractPoses(setPose, startPose);
resetEncoders();
}
/**
* This updates the elapsed time timer that keeps track of time between updates, as well as the
* change position of the Encoders. Then, the robot's global change in position is calculated
* using the pose exponential method.
*/
@Override
public void update() {
deltaTimeNano = timer.getElapsedTime();
timer.resetTimer();
updateEncoders();
Matrix robotDeltas = getRobotDeltas();
Matrix globalDeltas;
setPrevRotationMatrix(getPose().getHeading());
Matrix transformation = new Matrix(3,3);
if (Math.abs(robotDeltas.get(2, 0)) < 0.001) {
transformation.set(0, 0, 1.0 - (Math.pow(robotDeltas.get(2, 0), 2) / 6.0));
transformation.set(0, 1, -robotDeltas.get(2, 0) / 2.0);
transformation.set(1, 0, robotDeltas.get(2, 0) / 2.0);
transformation.set(1, 1, 1.0 - (Math.pow(robotDeltas.get(2, 0), 2) / 6.0));
transformation.set(2, 2, 1.0);
} else {
transformation.set(0, 0, Math.sin(robotDeltas.get(2, 0)) / robotDeltas.get(2, 0));
transformation.set(0, 1, (Math.cos(robotDeltas.get(2, 0)) - 1.0) / robotDeltas.get(2, 0));
transformation.set(1, 0, (1.0 - Math.cos(robotDeltas.get(2, 0))) / robotDeltas.get(2, 0));
transformation.set(1, 1, Math.sin(robotDeltas.get(2, 0)) / robotDeltas.get(2, 0));
transformation.set(2, 2, 1.0);
}
globalDeltas = Matrix.multiply(Matrix.multiply(prevRotationMatrix, transformation), robotDeltas);
displacementPose.add(new Pose(globalDeltas.get(0, 0), globalDeltas.get(1, 0), globalDeltas.get(2, 0)));
currentVelocity = new Pose(globalDeltas.get(0, 0) / (deltaTimeNano / Math.pow(10.0, 9)), globalDeltas.get(1, 0) / (deltaTimeNano / Math.pow(10.0, 9)), globalDeltas.get(2, 0) / (deltaTimeNano / Math.pow(10.0, 9)));
totalHeading += globalDeltas.get(2, 0);
}
/**
* This updates the Encoders.
*/
public void updateEncoders() {
leftEncoder.update();
rightEncoder.update();
strafeEncoder.update();
double currentIMUOrientation = MathFunctions.normalizeAngle(imu.getRobotYawPitchRollAngles().getYaw(AngleUnit.RADIANS));
deltaRadians = MathFunctions.getTurnDirection(previousIMUOrientation, currentIMUOrientation) * MathFunctions.getSmallestAngleDifference(currentIMUOrientation, previousIMUOrientation);
previousIMUOrientation = currentIMUOrientation;
}
/**
* This resets the Encoders.
*/
public void resetEncoders() {
leftEncoder.reset();
rightEncoder.reset();
strafeEncoder.reset();
}
/**
* This calculates the change in position from the perspective of the robot using information
* from the Encoders.
*
* @return returns a Matrix containing the robot relative movement.
*/
public Matrix getRobotDeltas() {
Matrix returnMatrix = new Matrix(3,1);
// x/forward movement
returnMatrix.set(0,0, FORWARD_TICKS_TO_INCHES * ((rightEncoder.getDeltaPosition() * leftEncoderPose.getY() - leftEncoder.getDeltaPosition() * rightEncoderPose.getY()) / (leftEncoderPose.getY() - rightEncoderPose.getY())));
//y/strafe movement
returnMatrix.set(1,0, STRAFE_TICKS_TO_INCHES * (strafeEncoder.getDeltaPosition() - strafeEncoderPose.getX() * ((rightEncoder.getDeltaPosition() - leftEncoder.getDeltaPosition()) / (leftEncoderPose.getY() - rightEncoderPose.getY()))));
// theta/turning
if (MathFunctions.getSmallestAngleDifference(0, deltaRadians) > 0.00005 && useIMU) {
returnMatrix.set(2, 0, deltaRadians);
} else {
returnMatrix.set(2,0, TURN_TICKS_TO_RADIANS * ((rightEncoder.getDeltaPosition() - leftEncoder.getDeltaPosition()) / (leftEncoderPose.getY() - rightEncoderPose.getY())));
}
return returnMatrix;
}
/**
* This returns how far the robot has turned in radians, in a number not clamped between 0 and
* 2 * pi radians. This is used for some tuning things and nothing actually within the following.
*
* @return returns how far the robot has turned in total, in radians.
*/
public double getTotalHeading() {
return totalHeading;
}
/**
* This returns the multiplier applied to forward movement measurement to convert from encoder
* ticks to inches. This is found empirically through a tuner.
*
* @return returns the forward ticks to inches multiplier
*/
public double getForwardMultiplier() {
return FORWARD_TICKS_TO_INCHES;
}
/**
* This returns the multiplier applied to lateral/strafe movement measurement to convert from
* encoder ticks to inches. This is found empirically through a tuner.
*
* @return returns the lateral/strafe ticks to inches multiplier
*/
public double getLateralMultiplier() {
return STRAFE_TICKS_TO_INCHES;
}
/**
 * This returns the multiplier applied to turning movement measurement to convert from encoder
 * ticks to radians. This is found empirically through a tuner.
 *
 * @return returns the turning ticks to radians multiplier
 */
public double getTurningMultiplier() {
    return TURN_TICKS_TO_RADIANS;
}
/**
 * This resets the IMU by zeroing its yaw reading. Note that this only clears the yaw on the
 * hub-mounted IMU; it does not reset this localizer's accumulated pose.
 */
public void resetIMU() {
    imu.resetYaw();
}
/**
 * This returns the IMU.
 *
 * @return returns the IMU
 */
@Override
public IMU getIMU() {
    return imu;
}
}

View File

@ -25,18 +25,18 @@ import org.firstinspires.ftc.teamcode.pedroPathing.util.NanoTimer;
*
* forward on robot is the x positive direction
*
* /--------------\
* | ____ |
* | ---- |
* | || || |
* | || || | ----> left (y positive)
* | |
* | |
* \--------------/
* |
* |
* V
* forward (x positive)
* forward (x positive)
*
* |
* |
* /--------------\
* | |
* | |
* | || || |
* left (y positive) <--- | || || |
* | ____ |
* | ---- |
* \--------------/
*
* @author Anyi Lin - 10158 Scott's Bots
* @version 1.0, 4/2/2024
@ -206,7 +206,7 @@ public class ThreeWheelLocalizer extends Localizer {
globalDeltas = Matrix.multiply(Matrix.multiply(prevRotationMatrix, transformation), robotDeltas);
displacementPose.add(new Pose(globalDeltas.get(0, 0), globalDeltas.get(1, 0), globalDeltas.get(2, 0)));
currentVelocity = new Pose(globalDeltas.get(0, 0) / (deltaTimeNano * Math.pow(10.0, 9)), globalDeltas.get(1, 0) / (deltaTimeNano * Math.pow(10.0, 9)), globalDeltas.get(2, 0) / (deltaTimeNano * Math.pow(10.0, 9)));
currentVelocity = new Pose(globalDeltas.get(0, 0) / (deltaTimeNano / Math.pow(10.0, 9)), globalDeltas.get(1, 0) / (deltaTimeNano / Math.pow(10.0, 9)), globalDeltas.get(2, 0) / (deltaTimeNano / Math.pow(10.0, 9)));
totalHeading += globalDeltas.get(2, 0);
}

View File

@ -0,0 +1,309 @@
package org.firstinspires.ftc.teamcode.pedroPathing.localization.localizers;
import com.acmerobotics.dashboard.config.Config;
import com.qualcomm.hardware.rev.RevHubOrientationOnRobot;
import com.qualcomm.robotcore.hardware.DcMotorEx;
import com.qualcomm.robotcore.hardware.HardwareMap;
import com.qualcomm.robotcore.hardware.IMU;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Encoder;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Localizer;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Matrix;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.MathFunctions;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector;
import org.firstinspires.ftc.teamcode.pedroPathing.util.NanoTimer;
/**
 * This is the TwoWheelLocalizer class. This class extends the Localizer superclass and is a
 * localizer that uses the two wheel odometry with IMU set up. The diagram below, which is modified from
 * Road Runner, shows a typical set up.
 *
 * The view is from the top of the robot looking downwards.
 *
 * left on robot is the y positive direction
 *
 * forward on robot is the x positive direction
 *
 *                      forward (x positive)
 *                                 △
 *                                 |
 *                                 |
 *                      /--------------\
 *                      |              |
 *                      |              |
 *                      |           || |
 * left (y positive) <--- |         || |
 *                      |     ____     |
 *                      |     ----     |
 *                      \--------------/
 *
 * @author Anyi Lin - 10158 Scott's Bots
 * @version 1.0, 4/2/2024
 */
@Config
public class TwoWheelLocalizer extends Localizer { // todo: make two wheel odo work
    private HardwareMap hardwareMap;
    // Hub-mounted IMU; supplies all heading information (the two wheels only measure translation).
    private IMU imu;
    // Field-relative pose the localizer was started (or last re-seeded) at.
    private Pose startPose;
    // Accumulated movement since the start pose; getPose() = startPose + displacementPose.
    private Pose displacementPose;
    // Most recent velocity estimate, refreshed every update().
    private Pose currentVelocity;
    // Rotation matrix built from the previous pose's heading; used to rotate robot-relative
    // deltas into the global frame.
    private Matrix prevRotationMatrix;
    // Timer measuring the nanoseconds between consecutive update() calls.
    private NanoTimer timer;
    private long deltaTimeNano;
    // Dead-wheel encoders: one mounted parallel to forward travel, one perpendicular (strafe).
    private Encoder forwardEncoder;
    private Encoder strafeEncoder;
    // Mounting poses of the pods relative to the robot's tracking center, in inches.
    private Pose forwardEncoderPose;
    private Pose strafeEncoderPose;
    // Last IMU yaw reading (radians) and the wrapped change since the previous reading.
    private double previousIMUOrientation;
    private double deltaRadians;
    // Running, un-wrapped total of heading change; used by tuners only.
    private double totalHeading;
    // Empirically tuned ticks-to-inches multipliers.
    // NOTE(review): these default products look very large for a ticks-to-inches factor —
    // confirm against your own forward/lateral tuner output before trusting them.
    public static double FORWARD_TICKS_TO_INCHES = 8192 * 1.37795 * 2 * Math.PI * 0.5008239963;
    public static double STRAFE_TICKS_TO_INCHES = 8192 * 1.37795 * 2 * Math.PI * 0.5018874659;

    /**
     * This creates a new TwoWheelLocalizer from a HardwareMap, with a starting Pose at (0,0)
     * facing 0 heading.
     *
     * @param map the HardwareMap
     */
    public TwoWheelLocalizer(HardwareMap map) {
        this(map, new Pose());
    }

    /**
     * This creates a new TwoWheelLocalizer from a HardwareMap and a Pose, with the Pose
     * specifying the starting pose of the localizer.
     *
     * @param map the HardwareMap
     * @param setStartPose the Pose to start from
     */
    public TwoWheelLocalizer(HardwareMap map, Pose setStartPose) {
        // TODO: replace these with your encoder positions
        // Pod offsets are entered in mm and converted to inches (divide by 25.4), with small
        // hand-tuned corrections added.
        forwardEncoderPose = new Pose(-18.5/25.4 - 0.1, 164.4/25.4, 0);
        strafeEncoderPose = new Pose(-107.9/25.4+0.25, -1.1/25.4-0.23, Math.toRadians(90));

        hardwareMap = map;

        imu = hardwareMap.get(IMU.class, "imu");
        // TODO: replace this with your IMU's orientation
        imu.initialize(new IMU.Parameters(new RevHubOrientationOnRobot(RevHubOrientationOnRobot.LogoFacingDirection.UP, RevHubOrientationOnRobot.UsbFacingDirection.LEFT)));

        // TODO: replace these with your encoder ports
        forwardEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, "leftRear"));
        strafeEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, "strafeEncoder"));

        // TODO: reverse any encoders necessary
        forwardEncoder.setDirection(Encoder.REVERSE);
        strafeEncoder.setDirection(Encoder.FORWARD);

        setStartPose(setStartPose);
        timer = new NanoTimer();
        // Seeded to 1 (not 0) so the first velocity calculation never divides by zero.
        deltaTimeNano = 1;
        displacementPose = new Pose();
        currentVelocity = new Pose();
        previousIMUOrientation = MathFunctions.normalizeAngle(imu.getRobotYawPitchRollAngles().getYaw(AngleUnit.RADIANS));
        deltaRadians = 0;
    }

    /**
     * This returns the current pose estimate.
     *
     * @return returns the current pose estimate as a Pose
     */
    @Override
    public Pose getPose() {
        return MathFunctions.addPoses(startPose, displacementPose);
    }

    /**
     * This returns the current velocity estimate.
     *
     * @return returns the current velocity estimate as a Pose
     */
    @Override
    public Pose getVelocity() {
        return currentVelocity.copy();
    }

    /**
     * This returns the current velocity estimate.
     *
     * @return returns the current velocity estimate as a Vector
     */
    @Override
    public Vector getVelocityVector() {
        return currentVelocity.getVector();
    }

    /**
     * This sets the start pose. Changing the start pose should move the robot as if all its
     * previous movements were displacing it from its new start pose.
     *
     * @param setStart the new start pose
     */
    @Override
    public void setStartPose(Pose setStart) {
        startPose = setStart;
    }

    /**
     * This sets the Matrix that contains the previous pose's heading rotation.
     *
     * @param heading the rotation of the Matrix
     */
    public void setPrevRotationMatrix(double heading) {
        // Standard 2D rotation matrix embedded in the top-left of a 3x3; the third row/column
        // passes the heading delta through unchanged.
        prevRotationMatrix = new Matrix(3,3);
        prevRotationMatrix.set(0, 0, Math.cos(heading));
        prevRotationMatrix.set(0, 1, -Math.sin(heading));
        prevRotationMatrix.set(1, 0, Math.sin(heading));
        prevRotationMatrix.set(1, 1, Math.cos(heading));
        prevRotationMatrix.set(2, 2, 1.0);
    }

    /**
     * This sets the current pose estimate. Changing this should just change the robot's current
     * pose estimate, not anything to do with the start pose.
     *
     * @param setPose the new current pose estimate
     */
    @Override
    public void setPose(Pose setPose) {
        displacementPose = MathFunctions.subtractPoses(setPose, startPose);
        resetEncoders();
    }

    /**
     * This updates the elapsed time timer that keeps track of time between updates, as well as the
     * change position of the Encoders and the IMU readings. Then, the robot's global change in
     * position is calculated using the pose exponential method.
     */
    @Override
    public void update() {
        deltaTimeNano = timer.getElapsedTime();
        timer.resetTimer();

        updateEncoders();
        Matrix robotDeltas = getRobotDeltas();
        Matrix globalDeltas;
        setPrevRotationMatrix(getPose().getHeading());

        Matrix transformation = new Matrix(3,3);
        if (Math.abs(robotDeltas.get(2, 0)) < 0.001) {
            // Near-zero heading change: use the small-angle (Taylor) approximation of the pose
            // exponential to avoid dividing by a tiny angle.
            transformation.set(0, 0, 1.0 - (Math.pow(robotDeltas.get(2, 0), 2) / 6.0));
            transformation.set(0, 1, -robotDeltas.get(2, 0) / 2.0);
            transformation.set(1, 0, robotDeltas.get(2, 0) / 2.0);
            transformation.set(1, 1, 1.0 - (Math.pow(robotDeltas.get(2, 0), 2) / 6.0));
            transformation.set(2, 2, 1.0);
        } else {
            // Exact pose exponential terms for a non-trivial heading change.
            transformation.set(0, 0, Math.sin(robotDeltas.get(2, 0)) / robotDeltas.get(2, 0));
            transformation.set(0, 1, (Math.cos(robotDeltas.get(2, 0)) - 1.0) / robotDeltas.get(2, 0));
            transformation.set(1, 0, (1.0 - Math.cos(robotDeltas.get(2, 0))) / robotDeltas.get(2, 0));
            transformation.set(1, 1, Math.sin(robotDeltas.get(2, 0)) / robotDeltas.get(2, 0));
            transformation.set(2, 2, 1.0);
        }

        // Rotate the robot-relative deltas into the global frame and accumulate.
        globalDeltas = Matrix.multiply(Matrix.multiply(prevRotationMatrix, transformation), robotDeltas);

        displacementPose.add(new Pose(globalDeltas.get(0, 0), globalDeltas.get(1, 0), globalDeltas.get(2, 0)));
        // deltaTimeNano / 10^9 converts the elapsed nanoseconds to seconds for units of in/s, rad/s.
        currentVelocity = new Pose(globalDeltas.get(0, 0) / (deltaTimeNano / Math.pow(10.0, 9)), globalDeltas.get(1, 0) / (deltaTimeNano / Math.pow(10.0, 9)), globalDeltas.get(2, 0) / (deltaTimeNano / Math.pow(10.0, 9)));

        totalHeading += globalDeltas.get(2, 0);
    }

    /**
     * This updates the Encoders as well as the IMU.
     */
    public void updateEncoders() {
        forwardEncoder.update();
        strafeEncoder.update();

        double currentIMUOrientation = MathFunctions.normalizeAngle(imu.getRobotYawPitchRollAngles().getYaw(AngleUnit.RADIANS));
        // Signed, wrap-safe heading change: direction * smallest angular difference.
        deltaRadians = MathFunctions.getTurnDirection(previousIMUOrientation, currentIMUOrientation) * MathFunctions.getSmallestAngleDifference(currentIMUOrientation, previousIMUOrientation);
        previousIMUOrientation = currentIMUOrientation;
    }

    /**
     * This resets the Encoders.
     */
    public void resetEncoders() {
        forwardEncoder.reset();
        strafeEncoder.reset();
    }

    /**
     * This calculates the change in position from the perspective of the robot using information
     * from the Encoders and IMU.
     *
     * @return returns a Matrix containing the robot relative movement.
     */
    public Matrix getRobotDeltas() {
        Matrix returnMatrix = new Matrix(3,1);
        // x/forward movement: encoder delta minus the portion of its reading caused purely by
        // the robot rotating about its tracking center (pod y-offset * heading change).
        returnMatrix.set(0,0, FORWARD_TICKS_TO_INCHES * (forwardEncoder.getDeltaPosition() - forwardEncoderPose.getY() * deltaRadians));
        //y/strafe movement: same rotation compensation, using the strafe pod's x-offset.
        returnMatrix.set(1,0, STRAFE_TICKS_TO_INCHES * (strafeEncoder.getDeltaPosition() - strafeEncoderPose.getX() * deltaRadians));
        // theta/turning comes straight from the IMU, not from the wheels.
        returnMatrix.set(2,0, deltaRadians);
        return returnMatrix;
    }

    /**
     * This returns how far the robot has turned in radians, as a running total that is NOT
     * wrapped to the [0, 2 * pi) range. This is used for some tuning things and nothing actually
     * within the following.
     *
     * @return returns how far the robot has turned in total, in radians.
     */
    public double getTotalHeading() {
        return totalHeading;
    }

    /**
     * This returns the multiplier applied to forward movement measurement to convert from encoder
     * ticks to inches. This is found empirically through a tuner.
     *
     * @return returns the forward ticks to inches multiplier
     */
    public double getForwardMultiplier() {
        return FORWARD_TICKS_TO_INCHES;
    }

    /**
     * This returns the multiplier applied to lateral/strafe movement measurement to convert from
     * encoder ticks to inches. This is found empirically through a tuner.
     *
     * @return returns the lateral/strafe ticks to inches multiplier
     */
    public double getLateralMultiplier() {
        return STRAFE_TICKS_TO_INCHES;
    }

    /**
     * This returns the multiplier applied to turning movement measurement to convert from encoder
     * ticks to radians. Heading is read directly from the IMU in radians, so no conversion is
     * needed and the multiplier is 1.
     *
     * @return returns the turning ticks to radians multiplier
     */
    public double getTurningMultiplier() {
        return 1;
    }

    /**
     * This resets the IMU by zeroing its yaw reading.
     */
    public void resetIMU() {
        imu.resetYaw();
    }

    /**
     * This returns the IMU.
     *
     * @return returns the IMU
     */
    @Override
    public IMU getIMU() {
        return imu;
    }
}

View File

@ -0,0 +1,321 @@
package org.firstinspires.ftc.teamcode.pedroPathing.localization.localizers;
import com.acmerobotics.dashboard.config.Config;
import com.qualcomm.robotcore.hardware.DcMotorEx;
import com.qualcomm.robotcore.hardware.HardwareMap;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Encoder;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.GoBildaPinpointDriver;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Localizer;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Matrix;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.MathFunctions;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector;
import org.firstinspires.ftc.teamcode.pedroPathing.util.NanoTimer;
/**
* This is the TwoWheelPinpointIMULocalizer class. This class extends the Localizer superclass and is a
* localizer that uses the two wheel odometry with the Pinpoint IMU set up. The diagram below, which is modified from
* Road Runner, shows a typical set up.
*
* The view is from the top of the robot looking downwards.
*
* left on robot is the y positive direction
*
* forward on robot is the x positive direction
*
* forward (x positive)
* △
* |
* |
* /--------------\
* | |
* | |
* | || |
* left (y positive) <--- | || |
* | ____ |
* | ---- |
* \--------------/
*
* @author Anyi Lin - 10158 Scott's Bots
* @author Havish Sripada - 12808 RevAmped Robotics
* @author Baron Henderson - 20077 The Indubitables
* @version 1.0, 11/28/2024
*/
@Config
public class TwoWheelPinpointIMULocalizer extends Localizer {
private HardwareMap hardwareMap;
private GoBildaPinpointDriver pinpoint;
private Pose startPose;
private Pose displacementPose;
private Pose currentVelocity;
private Matrix prevRotationMatrix;
private NanoTimer timer;
private long deltaTimeNano;
private Encoder forwardEncoder;
private Encoder strafeEncoder;
private Pose forwardEncoderPose;
private Pose strafeEncoderPose;
private double previousHeading;
private double deltaRadians;
private double totalHeading;
public static double FORWARD_TICKS_TO_INCHES = 8192 * 1.37795 * 2 * Math.PI * 0.5008239963;
public static double STRAFE_TICKS_TO_INCHES = 8192 * 1.37795 * 2 * Math.PI * 0.5018874659;
/**
* This creates a new TwoWheelLocalizer from a HardwareMap, with a starting Pose at (0,0)
* facing 0 heading.
*
* @param map the HardwareMap
*/
public TwoWheelPinpointIMULocalizer(HardwareMap map) {
this(map, new Pose());
}
/**
* This creates a new TwoWheelLocalizer from a HardwareMap and a Pose, with the Pose
* specifying the starting pose of the localizer.
*
* @param map the HardwareMap
* @param setStartPose the Pose to start from
*/
public TwoWheelPinpointIMULocalizer(HardwareMap map, Pose setStartPose) {
// TODO: replace these with your encoder positions
forwardEncoderPose = new Pose(-0.82, 6.47, 0);
strafeEncoderPose = new Pose(-4, -0.273, Math.toRadians(90));
hardwareMap = map;
pinpoint = hardwareMap.get(GoBildaPinpointDriver.class, "pinpoint");
pinpoint.resetPosAndIMU();
// TODO: replace these with your encoder ports
forwardEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, "leftRear"));
strafeEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, "strafeEncoder"));
// TODO: reverse any encoders necessary
forwardEncoder.setDirection(Encoder.REVERSE);
strafeEncoder.setDirection(Encoder.FORWARD);
setStartPose(setStartPose);
timer = new NanoTimer();
deltaTimeNano = 1;
displacementPose = new Pose();
currentVelocity = new Pose();
previousHeading = startPose.getHeading();
deltaRadians = 0;
}
/**
* This returns the current pose estimate.
*
* @return returns the current pose estimate as a Pose
*/
@Override
public Pose getPose() {
return new Pose(startPose.getX()+displacementPose.getX(), startPose.getY()+displacementPose.getY(),displacementPose.getHeading());
}
/**
* This returns the current velocity estimate.
*
* @return returns the current velocity estimate as a Pose
*/
@Override
public Pose getVelocity() {
return currentVelocity.copy();
}
/**
* This returns the current velocity estimate.
*
* @return returns the current velocity estimate as a Vector
*/
@Override
public Vector getVelocityVector() {
return currentVelocity.getVector();
}
/**
* This sets the start pose. Changing the start pose should move the robot as if all its
* previous movements were displacing it from its new start pose.
*
* @param setStart the new start pose
*/
@Override
public void setStartPose(Pose setStart) {
startPose = setStart;
}
/**
* This sets the Matrix that contains the previous pose's heading rotation.
*
* @param heading the rotation of the Matrix
*/
public void setPrevRotationMatrix(double heading) {
prevRotationMatrix = new Matrix(3, 3);
prevRotationMatrix.set(0, 0, Math.cos(heading));
prevRotationMatrix.set(0, 1, -Math.sin(heading));
prevRotationMatrix.set(1, 0, Math.sin(heading));
prevRotationMatrix.set(1, 1, Math.cos(heading));
prevRotationMatrix.set(2, 2, 1.0);
}
/**
* This sets the current pose estimate. Changing this should just change the robot's current
* pose estimate, not anything to do with the start pose.
*
* @param setPose the new current pose estimate
*/
@Override
public void setPose(Pose setPose) {
displacementPose = MathFunctions.subtractPoses(setPose, startPose);
resetEncoders();
}
/**
* This updates the elapsed time timer that keeps track of time between updates, as well as the
* change position of the Encoders and the IMU readings. Then, the robot's global change in
* position is calculated using the pose exponential method.
*/
@Override
public void update() {
deltaTimeNano = timer.getElapsedTime();
timer.resetTimer();
updateEncoders();
Matrix robotDeltas = getRobotDeltas();
Matrix globalDeltas;
setPrevRotationMatrix(getPose().getHeading());
Matrix transformation = new Matrix(3, 3);
if (Math.abs(robotDeltas.get(2, 0)) < 0.001) {
transformation.set(0, 0, 1.0 - (Math.pow(robotDeltas.get(2, 0), 2) / 6.0));
transformation.set(0, 1, -robotDeltas.get(2, 0) / 2.0);
transformation.set(1, 0, robotDeltas.get(2, 0) / 2.0);
transformation.set(1, 1, 1.0 - (Math.pow(robotDeltas.get(2, 0), 2) / 6.0));
transformation.set(2, 2, 1.0);
} else {
transformation.set(0, 0, Math.sin(robotDeltas.get(2, 0)) / robotDeltas.get(2, 0));
transformation.set(0, 1, (Math.cos(robotDeltas.get(2, 0)) - 1.0) / robotDeltas.get(2, 0));
transformation.set(1, 0, (1.0 - Math.cos(robotDeltas.get(2, 0))) / robotDeltas.get(2, 0));
transformation.set(1, 1, Math.sin(robotDeltas.get(2, 0)) / robotDeltas.get(2, 0));
transformation.set(2, 2, 1.0);
}
globalDeltas = Matrix.multiply(Matrix.multiply(prevRotationMatrix, transformation), robotDeltas);
displacementPose.add(new Pose(globalDeltas.get(0, 0), globalDeltas.get(1, 0), globalDeltas.get(2, 0)));
currentVelocity = new Pose(globalDeltas.get(0, 0) / (deltaTimeNano / Math.pow(10.0, 9)), globalDeltas.get(1, 0) / (deltaTimeNano / Math.pow(10.0, 9)), globalDeltas.get(2, 0) / (deltaTimeNano / Math.pow(10.0, 9)));
totalHeading += globalDeltas.get(2, 0);
}
/**
* This updates the Encoders as well as the IMU.
*/
public void updateEncoders() {
forwardEncoder.update();
strafeEncoder.update();
pinpoint.update(GoBildaPinpointDriver.readData.ONLY_UPDATE_HEADING);
double currentHeading = startPose.getHeading() + MathFunctions.normalizeAngle(pinpoint.getHeading());
deltaRadians = MathFunctions.getTurnDirection(previousHeading, currentHeading) * MathFunctions.getSmallestAngleDifference(currentHeading, previousHeading);
previousHeading = currentHeading;
}
/**
* This resets the Encoders.
*/
public void resetEncoders() {
forwardEncoder.reset();
strafeEncoder.reset();
}
/**
* This calculates the change in position from the perspective of the robot using information
* from the Encoders and IMU.
*
* @return returns a Matrix containing the robot relative movement.
*/
public Matrix getRobotDeltas() {
Matrix returnMatrix = new Matrix(3, 1);
// x/forward movement
returnMatrix.set(0, 0, FORWARD_TICKS_TO_INCHES * (forwardEncoder.getDeltaPosition() - forwardEncoderPose.getY() * deltaRadians));
//y/strafe movement
returnMatrix.set(1, 0, STRAFE_TICKS_TO_INCHES * (strafeEncoder.getDeltaPosition() - strafeEncoderPose.getX() * deltaRadians));
// theta/turning
returnMatrix.set(2, 0, deltaRadians);
return returnMatrix;
}
/**
* This returns how far the robot has turned in radians, in a number not clamped between 0 and
* 2 * pi radians. This is used for some tuning things and nothing actually within the following.
*
* @return returns how far the robot has turned in total, in radians.
*/
public double getTotalHeading() {
return totalHeading;
}
/**
* This returns the multiplier applied to forward movement measurement to convert from encoder
* ticks to inches. This is found empirically through a tuner.
*
* @return returns the forward ticks to inches multiplier
*/
public double getForwardMultiplier() {
return FORWARD_TICKS_TO_INCHES;
}
/**
* This returns the multiplier applied to lateral/strafe movement measurement to convert from
* encoder ticks to inches. This is found empirically through a tuner.
*
* @return returns the lateral/strafe ticks to inches multiplier
*/
public double getLateralMultiplier() {
return STRAFE_TICKS_TO_INCHES;
}
/**
* This returns the multiplier applied to turning movement measurement to convert from encoder
* ticks to radians. This is found empirically through a tuner.
*
* @return returns the turning ticks to radians multiplier
*/
public double getTurningMultiplier() {
return 1;
}
/**
* This resets the IMU.
*/
@Override
public void resetIMU() throws InterruptedException {
pinpoint.recalibrateIMU();
try {
Thread.sleep(300);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
/**
* This resets the pinpoint.
*/
private void resetPinpoint() throws InterruptedException{
pinpoint.resetPosAndIMU();
try {
Thread.sleep(300);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
}

View File

@ -0,0 +1,71 @@
package org.firstinspires.ftc.teamcode.pedroPathing.localization.tuning;
import com.acmerobotics.dashboard.FtcDashboard;
import com.acmerobotics.dashboard.config.Config;
import com.acmerobotics.dashboard.telemetry.MultipleTelemetry;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import org.firstinspires.ftc.robotcore.external.Telemetry;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.PoseUpdater;
import org.firstinspires.ftc.teamcode.pedroPathing.util.DashboardPoseTracker;
import org.firstinspires.ftc.teamcode.pedroPathing.util.Drawing;
/**
 * This is the ForwardTuner OpMode. This tracks the forward movement of the robot and displays the
 * necessary ticks to inches multiplier. This displayed multiplier is what's necessary to scale the
 * robot's current distance in ticks to the specified distance in inches. So, to use this, run the
 * tuner, then pull/push the robot to the specified distance using a ruler on the ground. When you're
 * at the end of the distance, record the ticks to inches multiplier. Feel free to run multiple trials
 * and average the results. Then, input the multiplier into the forward ticks to inches in your
 * localizer of choice.
 * You can adjust the target distance on FTC Dashboard: 192.168.43.1:8080/dash
 *
 * @author Anyi Lin - 10158 Scott's Bots
 * @version 1.0, 5/6/2024
 */
@Config
@Autonomous(name = "Forward Localizer Tuner", group = "Autonomous Pathing Tuning")
public class ForwardTuner extends OpMode {
    // Runs the active localizer to produce pose estimates.
    private PoseUpdater poseUpdater;
    // Records pose history for drawing the robot's path on FTC Dashboard.
    private DashboardPoseTracker dashboardPoseTracker;
    // Combined telemetry to both the Driver Station and FTC Dashboard.
    private Telemetry telemetryA;

    // Target distance, in inches, to pull the robot forward; adjustable live via @Config.
    public static double DISTANCE = 48;

    /**
     * This initializes the PoseUpdater as well as the FTC Dashboard telemetry.
     */
    @Override
    public void init() {
        poseUpdater = new PoseUpdater(hardwareMap);

        dashboardPoseTracker = new DashboardPoseTracker(poseUpdater);

        telemetryA = new MultipleTelemetry(this.telemetry, FtcDashboard.getInstance().getTelemetry());
        telemetryA.addLine("Pull your robot forward " + DISTANCE + " inches. Your forward ticks to inches will be shown on the telemetry.");
        telemetryA.update();

        Drawing.drawRobot(poseUpdater.getPose(), "#4CAF50");
        Drawing.sendPacket();
    }

    /**
     * This updates the robot's pose estimate, and updates the FTC Dashboard telemetry with the
     * calculated multiplier and draws the robot.
     */
    @Override
    public void loop() {
        poseUpdater.update();

        telemetryA.addData("distance moved", poseUpdater.getPose().getX());
        telemetryA.addLine("The multiplier will display what your forward ticks to inches should be to scale your current distance to " + DISTANCE + " inches.");
        // Dividing the measured x by the current multiplier recovers raw ticks; the new
        // multiplier is then DISTANCE / raw ticks.
        telemetryA.addData("multiplier", DISTANCE / (poseUpdater.getPose().getX() / poseUpdater.getLocalizer().getForwardMultiplier()));
        telemetryA.update();

        Drawing.drawPoseHistory(dashboardPoseTracker, "#4CAF50");
        Drawing.drawRobot(poseUpdater.getPose(), "#4CAF50");
        Drawing.sendPacket();
    }
}

View File

@ -33,7 +33,7 @@ public class LateralTuner extends OpMode {
private Telemetry telemetryA;
public static double DISTANCE = 30;
public static double DISTANCE = 48;
/**
* This initializes the PoseUpdater as well as the FTC Dashboard telemetry.

View File

@ -1,13 +1,13 @@
package org.firstinspires.ftc.teamcode.pedroPathing.localization.tuning;
import static org.firstinspires.ftc.teamcode.PedroConstants.BACK_LEFT_MOTOR_DIRECTION;
import static org.firstinspires.ftc.teamcode.PedroConstants.BACK_RIGHT_MOTOR_DIRECTION;
import static org.firstinspires.ftc.teamcode.PedroConstants.FRONT_LEFT_MOTOR_DIRECTION;
import static org.firstinspires.ftc.teamcode.PedroConstants.FRONT_RIGHT_MOTOR_DIRECTION;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftFrontMotorName;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftRearMotorName;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightFrontMotorName;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightRearMotorName;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftFrontMotorDirection;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftRearMotorDirection;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightFrontMotorDirection;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightRearMotorDirection;
import com.acmerobotics.dashboard.FtcDashboard;
import com.acmerobotics.dashboard.config.Config;
@ -16,7 +16,6 @@ import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.hardware.DcMotorEx;
import com.qualcomm.robotcore.hardware.DcMotorSimple;
import com.qualcomm.robotcore.hardware.configuration.typecontainers.MotorConfigurationType;
import org.firstinspires.ftc.robotcore.external.Telemetry;
@ -61,11 +60,10 @@ public class LocalizationTest extends OpMode {
leftRear = hardwareMap.get(DcMotorEx.class, leftRearMotorName);
rightRear = hardwareMap.get(DcMotorEx.class, rightRearMotorName);
rightFront = hardwareMap.get(DcMotorEx.class, rightFrontMotorName);
leftFront.setDirection(FRONT_LEFT_MOTOR_DIRECTION);
leftRear.setDirection(BACK_LEFT_MOTOR_DIRECTION);
rightFront.setDirection(FRONT_RIGHT_MOTOR_DIRECTION);
rightRear.setDirection(BACK_RIGHT_MOTOR_DIRECTION);
leftFront.setDirection(leftFrontMotorDirection);
leftRear.setDirection(leftRearMotorDirection);
rightFront.setDirection(rightFrontMotorDirection);
rightRear.setDirection(rightRearMotorDirection);
motors = Arrays.asList(leftFront, leftRear, rightFront, rightRear);

View File

@ -0,0 +1,200 @@
/* MIT License
* Copyright (c) [2024] [Base 10 Assets, LLC]
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package org.firstinspires.ftc.teamcode.pedroPathing.localization.tuning;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.robotcore.external.navigation.DistanceUnit;
import org.firstinspires.ftc.robotcore.external.navigation.Pose2D;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.GoBildaPinpointDriver;
import java.util.Locale;
/*
This opmode shows how to use the goBILDA® Pinpoint Odometry Computer.
The goBILDA Odometry Computer is a device designed to solve the Pose Exponential calculation
commonly associated with Dead Wheel Odometry systems. It reads two encoders, and an integrated
system of sensors to determine the robot's current heading, X position, and Y position.
it uses an ESP32-S3 as a main cpu, with an STM LSM6DSV16X IMU.
It is validated with goBILDA "Dead Wheel" Odometry pods, but should be compatible with any
quadrature rotary encoder. The ESP32 PCNT peripheral is speced to decode quadrature encoder signals
at a maximum of 40mhz per channel. Though the maximum in-application tested number is 130khz.
The device expects two perpendicularly mounted Dead Wheel pods. The encoder pulses are translated
into mm and their readings are transformed by an "offset", this offset describes how far away
the pods are from the "tracking point", usually the center of rotation of the robot.
Dead Wheel pods should both increase in count when moved forwards and to the left.
The gyro will report an increase in heading when rotated counterclockwise.
The Pose Exponential algorithm used is described on pg 181 of this book:
https://github.com/calcmogul/controls-engineering-in-frc
For support, contact tech@gobilda.com
-Ethan Doak
*/
//TODO: If tuning comment out the @Disabled
@TeleOp(name="goBILDA® PinPoint Odometry Example", group="Linear OpMode")
@Disabled
public class SensorGoBildaPinpointExample extends LinearOpMode {
GoBildaPinpointDriver odo; // Declare OpMode member for the Odometry Computer
double oldTime = 0;
@Override
public void runOpMode() {
// Initialize the hardware variables. Note that the strings used here must correspond
// to the names assigned during the robot configuration step on the DS or RC devices.
odo = hardwareMap.get(GoBildaPinpointDriver.class,"odo");
/*
Set the odometry pod positions relative to the point that the odometry computer tracks around.
The X pod offset refers to how far sideways from the tracking point the
X (forward) odometry pod is. Left of the center is a positive number,
right of center is a negative number. the Y pod offset refers to how far forwards from
the tracking point the Y (strafe) odometry pod is. forward of center is a positive number,
backwards is a negative number.
*/
odo.setOffsets(-84.0, -168.0); //these are tuned for 3110-0002-0001 Product Insight #1
/*
Set the kind of pods used by your robot. If you're using goBILDA odometry pods, select either
the goBILDA_SWINGARM_POD, or the goBILDA_4_BAR_POD.
If you're using another kind of odometry pod, uncomment setEncoderResolution and input the
number of ticks per mm of your odometry pod.
*/
odo.setEncoderResolution(GoBildaPinpointDriver.GoBildaOdometryPods.goBILDA_4_BAR_POD);
//odo.setEncoderResolution(13.26291192);
/*
Set the direction that each of the two odometry pods count. The X (forward) pod should
increase when you move the robot forward. And the Y (strafe) pod should increase when
you move the robot to the left.
*/
odo.setEncoderDirections(GoBildaPinpointDriver.EncoderDirection.FORWARD, GoBildaPinpointDriver.EncoderDirection.FORWARD);
/*
Before running the robot, recalibrate the IMU. This needs to happen when the robot is stationary
The IMU will automatically calibrate when first powered on, but recalibrating before running
the robot is a good idea to ensure that the calibration is "good".
resetPosAndIMU will reset the position to 0,0,0 and also recalibrate the IMU.
This is recommended before you run your autonomous, as a bad initial calibration can cause
an incorrect starting value for x, y, and heading.
*/
//odo.recalibrateIMU();
odo.resetPosAndIMU();
telemetry.addData("Status", "Initialized");
telemetry.addData("X offset", odo.getXOffset());
telemetry.addData("Y offset", odo.getYOffset());
telemetry.addData("Device Version Number:", odo.getDeviceVersion());
telemetry.addData("Device Scalar", odo.getYawScalar());
telemetry.update();
// Wait for the game to start (driver presses START)
waitForStart();
resetRuntime();
// run until the end of the match (driver presses STOP)
while (opModeIsActive()) {
/*
Request an update from the Pinpoint odometry computer. This checks almost all outputs
from the device in a single I2C read.
*/
odo.update();
/*
Optionally, you can update only the heading of the device. This takes less time to read, but will not
pull any other data. Only the heading will update (which you can pull with getHeading() or via getPosition()).
*/
//odo.update(GoBildaPinpointDriver.readData.ONLY_UPDATE_HEADING);
if (gamepad1.a){
odo.resetPosAndIMU(); //resets the position to 0 and recalibrates the IMU
}
if (gamepad1.b){
odo.recalibrateIMU(); //recalibrates the IMU without resetting position
}
/*
This code prints the loop frequency of the REV Control Hub. This frequency is affected
by I²C reads/writes. So it's good to keep an eye on. This code calculates the amount
of time each cycle takes and finds the frequency (number of updates per second) from
that cycle time.
*/
double newTime = getRuntime();
double loopTime = newTime-oldTime;
double frequency = 1/loopTime;
oldTime = newTime;
/*
gets the current Position (x & y in mm, and heading in degrees) of the robot, and prints it.
*/
Pose2D pos = odo.getPosition();
String data = String.format(Locale.US, "{X: %.3f, Y: %.3f, H: %.3f}", pos.getX(DistanceUnit.MM), pos.getY(DistanceUnit.MM), pos.getHeading(AngleUnit.DEGREES));
telemetry.addData("Position", data);
/*
gets the current Velocity (x & y in mm/sec and heading in degrees/sec) and prints it.
*/
Pose2D vel = odo.getVelocity();
String velocity = String.format(Locale.US,"{XVel: %.3f, YVel: %.3f, HVel: %.3f}", vel.getX(DistanceUnit.MM), vel.getY(DistanceUnit.MM), vel.getHeading(AngleUnit.DEGREES));
telemetry.addData("Velocity", velocity);
/*
Gets the Pinpoint device status. Pinpoint can reflect a few states. But we'll primarily see
READY: the device is working as normal
CALIBRATING: the device is calibrating and outputs are put on hold
NOT_READY: the device is resetting from scratch. This should only happen after a power-cycle
FAULT_NO_PODS_DETECTED - the device does not detect any pods plugged in
FAULT_X_POD_NOT_DETECTED - The device does not detect an X pod plugged in
FAULT_Y_POD_NOT_DETECTED - The device does not detect a Y pod plugged in
*/
telemetry.addData("Status", odo.getDeviceStatus());
telemetry.addData("Pinpoint Frequency", odo.getFrequency()); //prints/gets the current refresh rate of the Pinpoint
telemetry.addData("REV Hub Frequency: ", frequency); //prints the control system refresh rate
telemetry.update();
}
}}

View File

@ -27,16 +27,16 @@ public class MathFunctions {
double denom = 1;
// this multiplies up the numerator of the nCr function
for (int i = n; i > n-r; i--) {
for (int i = n; i > n - r; i--) {
num *= i;
}
// this multiplies up the denominator of the nCr function
for (int i = 1; i <=r; i++) {
for (int i = 1; i <= r; i++) {
denom *= i;
}
return num/denom;
return num / denom;
}
/**
@ -67,7 +67,7 @@ public class MathFunctions {
/**
* This normalizes an angle to be between 0 and 2 pi radians, inclusive.
*
* <p>
* IMPORTANT NOTE: This method operates in radians.
*
* @param angleRadians the angle to be normalized.
@ -75,8 +75,8 @@ public class MathFunctions {
*/
public static double normalizeAngle(double angleRadians) {
    // Wrap the angle into [0, 2*pi] by adding/removing whole turns.
    // (Merge left both the old and reformatted while-loops in place; keep one pair.)
    double angle = angleRadians;
    while (angle < 0) angle += 2 * Math.PI;
    while (angle > 2 * Math.PI) angle -= 2 * Math.PI;
    return angle;
}
@ -88,7 +88,7 @@ public class MathFunctions {
* @return returns the smallest angle.
*/
public static double getSmallestAngleDifference(double one, double two) {
    // The smaller of the two normalized directed differences is the shortest angular gap.
    // (Merge residue left a duplicated return; keep a single statement.)
    return Math.min(MathFunctions.normalizeAngle(one - two), MathFunctions.normalizeAngle(two - one));
}
/**
@ -98,7 +98,7 @@ public class MathFunctions {
* @return returns the turn direction.
*/
public static double getTurnDirection(double startHeading, double endHeading) {
    // A normalized delta within [0, pi] means counter-clockwise is the shorter turn.
    // (Merge residue left a duplicated if-statement; keep one.)
    if (MathFunctions.normalizeAngle(endHeading - startHeading) >= 0 && MathFunctions.normalizeAngle(endHeading - startHeading) <= Math.PI) {
        return 1; // counter clock wise
    }
    return -1; // clock wise
@ -112,7 +112,7 @@ public class MathFunctions {
* @return returns the distance between the two.
*/
public static double distance(Pose pose, Point point) {
    // Euclidean distance between the Pose's position and the Point.
    // (Merge residue left a duplicated return; keep one.)
    return Math.sqrt(Math.pow(pose.getX() - point.getX(), 2) + Math.pow(pose.getY() - point.getY(), 2));
}
/**
@ -123,7 +123,7 @@ public class MathFunctions {
* @return returns the distance between the two.
*/
public static double distance(Pose one, Pose two) {
    // Euclidean distance between the positions of two Poses.
    // (Merge residue left a duplicated return; keep one.)
    return Math.sqrt(Math.pow(one.getX() - two.getX(), 2) + Math.pow(one.getY() - two.getY(), 2));
}
/**
@ -172,6 +172,22 @@ public class MathFunctions {
return new Pose(one.getX() - two.getX(), one.getY() - two.getY(), one.getHeading() - two.getHeading());
}
/**
* This rotates the given pose by the given theta.
*
* @param pose the Pose to rotate.
* @param theta the angle to rotate by.
* @param rotateHeading whether to adjust the Pose heading too.
* @return the rotated Pose.
*/
public static Pose rotatePose(Pose pose, double theta, boolean rotateHeading) {
    // Standard 2D rotation of the position about the origin by theta.
    double cosTheta = Math.cos(theta);
    double sinTheta = Math.sin(theta);
    double rotatedX = pose.getX() * cosTheta - pose.getY() * sinTheta;
    double rotatedY = pose.getX() * sinTheta + pose.getY() * cosTheta;
    // Optionally carry the rotation into the heading, normalized to [0, 2*pi].
    if (rotateHeading) {
        return new Pose(rotatedX, rotatedY, normalizeAngle(pose.getHeading() + theta));
    }
    return new Pose(rotatedX, rotatedY, pose.getHeading());
}
/**
* This multiplies a Point by a scalar and returns the result as a Point
*
@ -180,7 +196,7 @@ public class MathFunctions {
* @return returns the scaled Point.
*/
public static Point scalarMultiplyPoint(Point point, double scalar) {
    // Scale both Cartesian components by the scalar.
    // (Merge residue left a duplicated return; keep one.)
    return new Point(point.getX() * scalar, point.getY() * scalar, Point.CARTESIAN);
}
/**
@ -213,7 +229,7 @@ public class MathFunctions {
* @return returns the scaled Vector.
*/
public static Vector scalarMultiplyVector(Vector vector, double scalar) {
    // Scaling a vector multiplies its magnitude; direction is unchanged.
    // (Merge residue left a duplicated return; keep one.)
    return new Vector(vector.getMagnitude() * scalar, vector.getTheta());
}
/**
@ -227,7 +243,7 @@ public class MathFunctions {
if (vector.getMagnitude() == 0) {
return new Vector(0.0, vector.getTheta());
} else {
return new Vector(vector.getMagnitude()/Math.abs(vector.getMagnitude()), vector.getTheta());
return new Vector(vector.getMagnitude() / Math.abs(vector.getMagnitude()), vector.getTheta());
}
}
@ -240,7 +256,7 @@ public class MathFunctions {
*/
public static Vector addVectors(Vector one, Vector two) {
    // Sum component-wise; the Vector recomputes magnitude/theta from the components.
    // (Merge residue left a duplicated setOrthogonalComponents call; keep one.)
    Vector returnVector = new Vector();
    returnVector.setOrthogonalComponents(one.getXComponent() + two.getXComponent(), one.getYComponent() + two.getYComponent());
    return returnVector;
}
@ -254,7 +270,7 @@ public class MathFunctions {
*/
public static Vector subtractVectors(Vector one, Vector two) {
    // Subtract component-wise (one - two); the Vector recomputes magnitude/theta.
    // (Merge residue left a duplicated setOrthogonalComponents call; keep one.)
    Vector returnVector = new Vector();
    returnVector.setOrthogonalComponents(one.getXComponent() - two.getXComponent(), one.getYComponent() - two.getYComponent());
    return returnVector;
}
@ -266,7 +282,7 @@ public class MathFunctions {
* @return returns the dot product of the two Vectors.
*/
public static double dotProduct(Vector one, Vector two) {
    // Standard 2D dot product from orthogonal components.
    // (Merge residue left a duplicated return; keep one.)
    return one.getXComponent() * two.getXComponent() + one.getYComponent() * two.getYComponent();
}
/**
@ -278,7 +294,7 @@ public class MathFunctions {
* @return returns the cross product of the two Vectors.
*/
public static double crossProduct(Vector one, Vector two) {
    // 2D cross product (z-component of the 3D cross product).
    // (Merge residue left a duplicated return; keep one.)
    return one.getXComponent() * two.getYComponent() - one.getYComponent() * two.getXComponent();
}
/**
@ -286,7 +302,7 @@ public class MathFunctions {
* specified accuracy amount.
*
* @param one first number specified.
* @param two second number specified.
* @param two Second number specified.
* @param accuracy the level of accuracy specified.
* @return returns if the two numbers are within the specified accuracy of each other.
*/

View File

@ -51,16 +51,47 @@ public class PathBuilder {
return this;
}
/**
* This adds a default Path defined by a specified BezierCurve to the PathBuilder.
*
* @param controlPoints This is the specified control points that define the BezierCurve.
* @return This returns itself with the updated data.
*/
public PathBuilder addBezierCurve(Point... controlPoints) {
    // Wrap the control points in a BezierCurve and append it as a new Path.
    BezierCurve curve = new BezierCurve(controlPoints);
    return addPath(curve);
}
/**
* This adds a default Path defined by a specified BezierCurve to the PathBuilder.
*
* @param controlPoints This is the specified control points that define the BezierCurve.
* @return This returns itself with the updated data.
*/
public PathBuilder addBezierCurve(ArrayList<Point> controlPoints) {
    // Wrap the control-point list in a BezierCurve and append it as a new Path.
    BezierCurve curve = new BezierCurve(controlPoints);
    return addPath(curve);
}
/**
* This adds a default Path defined by a specified BezierLine to the PathBuilder.
*
* @param startPoint start point of the line.
* @param endPoint end point of the line.
* @return This returns itself with the updated data.
*/
public PathBuilder addBezierLine(Point startPoint, Point endPoint) {
    // Build the straight segment between the two points and append it as a new Path.
    BezierLine line = new BezierLine(startPoint, endPoint);
    return addPath(line);
}
/**
* This sets a linear heading interpolation on the last Path added to the PathBuilder.
*
* @param startHeading The start of the linear heading interpolation.
* @param endHeading The end of the linear heading interpolation.
* This will be reached at the end of the Path if no end time is specified.
* This will be reached at the end of the Path if no end time is specified.
* @return This returns itself with the updated data.
*/
public PathBuilder setLinearHeadingInterpolation(double startHeading, double endHeading) {
    // Apply the interpolation to the most recently added Path.
    // (Merge residue left a duplicated statement; keep one.)
    this.paths.get(paths.size() - 1).setLinearHeadingInterpolation(startHeading, endHeading);
    return this;
}
@ -69,13 +100,13 @@ public class PathBuilder {
*
* @param startHeading The start of the linear heading interpolation.
* @param endHeading The end of the linear heading interpolation.
* This will be reached at the end of the Path if no end time is specified.
* This will be reached at the end of the Path if no end time is specified.
* @param endTime The end time on the Path that the linear heading interpolation will end.
* This value goes from [0, 1] since Bezier curves are parametric functions.
* This value goes from [0, 1] since Bezier curves are parametric functions.
* @return This returns itself with the updated data.
*/
public PathBuilder setLinearHeadingInterpolation(double startHeading, double endHeading, double endTime) {
    // Apply the interpolation (ending at parametric endTime) to the most recently added Path.
    // (Merge residue left a duplicated statement; keep one.)
    this.paths.get(paths.size() - 1).setLinearHeadingInterpolation(startHeading, endHeading, endTime);
    return this;
}
@ -86,7 +117,7 @@ public class PathBuilder {
* @return This returns itself with the updated data.
*/
public PathBuilder setConstantHeadingInterpolation(double setHeading) {
    // Apply a constant heading to the most recently added Path.
    // (Merge residue left a duplicated statement; keep one.)
    this.paths.get(paths.size() - 1).setConstantHeadingInterpolation(setHeading);
    return this;
}
@ -94,20 +125,21 @@ public class PathBuilder {
* This sets a reversed or tangent heading interpolation on the last Path added to the PathBuilder.
*
* @param set This sets the heading to reversed tangent following if this parameter is true.
* Conversely, this sets a tangent following if this parameter is false.
* Conversely, this sets a tangent following if this parameter is false.
* @return This returns itself with the updated data.
*/
public PathBuilder setReversed(boolean set) {
    // Toggle reversed-tangent vs. tangent following on the most recently added Path.
    // (Merge residue left a duplicated statement; keep one.)
    this.paths.get(paths.size() - 1).setReversed(set);
    return this;
}
/**
* This sets the heading interpolation to tangential on the last Path added to the PathBuilder.
* There really shouldn't be a reason to use this since the default heading interpolation is
* tangential but it's here.
*/
public PathBuilder setTangentHeadingInterpolation() {
    // Explicitly select tangential heading (the default) on the most recently added Path.
    // (Merge residue left a duplicated statement; keep one.)
    this.paths.get(paths.size() - 1).setTangentHeadingInterpolation();
    return this;
}
@ -118,7 +150,7 @@ public class PathBuilder {
* @return This returns itself with the updated data.
*/
public PathBuilder setZeroPowerAccelerationMultiplier(double set) {
    // Apply the multiplier to the most recently added Path.
    // (Merge residue left a duplicated statement; keep one.)
    this.paths.get(paths.size() - 1).setZeroPowerAccelerationMultiplier(set);
    return this;
}
@ -129,7 +161,7 @@ public class PathBuilder {
* @return This returns itself with the updated data.
*/
public PathBuilder setPathEndVelocityConstraint(double set) {
    // Apply the constraint to the most recently added Path.
    // (Merge residue left a duplicated statement; keep one.)
    this.paths.get(paths.size() - 1).setPathEndVelocityConstraint(set);
    return this;
}
@ -140,7 +172,7 @@ public class PathBuilder {
* @return This returns itself with the updated data.
*/
public PathBuilder setPathEndTranslationalConstraint(double set) {
    // Apply the constraint to the most recently added Path.
    // (Merge residue left a duplicated statement; keep one.)
    this.paths.get(paths.size() - 1).setPathEndTranslationalConstraint(set);
    return this;
}
@ -151,7 +183,7 @@ public class PathBuilder {
* @return This returns itself with the updated data.
*/
public PathBuilder setPathEndHeadingConstraint(double set) {
    // Apply the constraint to the most recently added Path.
    // (Merge residue left a duplicated statement; keep one.)
    this.paths.get(paths.size() - 1).setPathEndHeadingConstraint(set);
    return this;
}
@ -162,7 +194,7 @@ public class PathBuilder {
* @return This returns itself with the updated data.
*/
public PathBuilder setPathEndTValueConstraint(double set) {
    // Apply the constraint to the most recently added Path.
    // (Merge residue left a duplicated statement; keep one.)
    this.paths.get(paths.size() - 1).setPathEndTValueConstraint(set);
    return this;
}
@ -173,7 +205,7 @@ public class PathBuilder {
* @return This returns itself with the updated data.
*/
public PathBuilder setPathEndTimeoutConstraint(double set) {
    // Apply the timeout to the most recently added Path.
    // (Merge residue left a duplicated statement; keep one.)
    this.paths.get(paths.size() - 1).setPathEndTimeoutConstraint(set);
    return this;
}
@ -182,12 +214,12 @@ public class PathBuilder {
* This callback is set to run at a specified number of milliseconds after the start of the path.
*
* @param time This sets the number of milliseconds of wait between the start of the Path and
* the calling of the callback.
* the calling of the callback.
* @param runnable This sets the code for the callback to run. Use lambda statements for this.
* @return This returns itself with the updated data.
*/
public PathBuilder addTemporalCallback(double time, Runnable runnable) {
    // Register a time-based callback bound to the most recently added Path.
    // (Merge residue left a duplicated statement; keep one.)
    this.callbacks.add(new PathCallback(time, runnable, PathCallback.TIME, paths.size() - 1));
    return this;
}
@ -200,7 +232,7 @@ public class PathBuilder {
* @return This returns itself with the updated data.
*/
public PathBuilder addParametricCallback(double t, Runnable runnable) {
    // Register a parametric (t-value) callback bound to the most recently added Path.
    // (Merge residue left a duplicated statement; keep one.)
    this.callbacks.add(new PathCallback(t, runnable, PathCallback.PARAMETRIC, paths.size() - 1));
    return this;
}

View File

@ -1,5 +1,7 @@
package org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration;
import androidx.annotation.NonNull;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
/**
@ -31,13 +33,13 @@ public class Point {
* This creates a new Point with coordinate inputs and a specified coordinate system.
*
* @param rOrX Depending on the coordinate system specified, this is either the r or x value.
* In polar coordinates, the r value is the distance from the origin.
* In Cartesian coordinates, the x value is the distance left/right from the origin.
* In polar coordinates, the r value is the distance from the origin.
* In Cartesian coordinates, the x value is the distance left/right from the origin.
* @param thetaOrY Depending on the coordinate system specified, this is either the theta or
* y value.
* In polar coordinates, the theta value is the angle from the positive x-axis.
* Increasing theta moves in the counter-clockwise direction.
* In Cartesian coordinates, the y value is the distance up/down from the origin.
* y value.
* In polar coordinates, the theta value is the angle from the positive x-axis.
* Increasing theta moves in the counter-clockwise direction.
* In Cartesian coordinates, the y value is the distance up/down from the origin.
* @param identifier this specifies what coordinate system the coordinate inputs are in.
*/
public Point(double rOrX, double thetaOrY, int identifier) {
@ -53,17 +55,27 @@ public class Point {
setCoordinates(pose.getX(), pose.getY(), CARTESIAN);
}
/**
* This creates a new Point from a X and Y value.
*
* @param setX the X value.
* @param setY the Y value.
*/
public Point(double setX, double setY) {
// Delegates to setCoordinates, treating the inputs as Cartesian x/y.
setCoordinates(setX, setY, CARTESIAN);
}
/**
* This sets the coordinates of the Point using the specified coordinate system.
*
* @param rOrX Depending on the coordinate system specified, this is either the r or x value.
* In polar coordinates, the r value is the distance from the origin.
* In Cartesian coordinates, the x value is the distance left/right from the origin.
* In polar coordinates, the r value is the distance from the origin.
* In Cartesian coordinates, the x value is the distance left/right from the origin.
* @param thetaOrY Depending on the coordinate system specified, this is either the theta or
* y value.
* In polar coordinates, the theta value is the angle from the positive x-axis.
* Increasing theta moves in the counter-clockwise direction.
* In Cartesian coordinates, the y value is the distance up/down from the origin.
* y value.
* In polar coordinates, the theta value is the angle from the positive x-axis.
* Increasing theta moves in the counter-clockwise direction.
* In Cartesian coordinates, the y value is the distance up/down from the origin.
* @param identifier this specifies what coordinate system to use when setting values.
*/
public void setCoordinates(double rOrX, double thetaOrY, int identifier) {
@ -78,9 +90,9 @@ public class Point {
theta = setOtherCoordinates[1];
break;
default:
if (rOrX<0) {
if (rOrX < 0) {
r = -rOrX;
theta = MathFunctions.normalizeAngle(thetaOrY+Math.PI);
theta = MathFunctions.normalizeAngle(thetaOrY + Math.PI);
} else {
r = rOrX;
theta = MathFunctions.normalizeAngle(thetaOrY);
@ -99,7 +111,7 @@ public class Point {
* @return returns the distance between the two Points.
*/
public double distanceFrom(Point otherPoint) {
    // Euclidean distance from this Point to otherPoint.
    // (Merge residue left a duplicated return; keep one.)
    return Math.sqrt(Math.pow(otherPoint.getX() - x, 2) + Math.pow(otherPoint.getY() - y, 2));
}
/**
@ -110,7 +122,7 @@ public class Point {
* @return this returns the x and y values, in that order, in an Array of doubles.
*/
public static double[] polarToCartesian(double r, double theta) {
    // x = r*cos(theta), y = r*sin(theta).
    // (Merge residue left a duplicated return; keep one.)
    return new double[]{r * Math.cos(theta), r * Math.sin(theta)};
}
/**
@ -123,17 +135,17 @@ public class Point {
public static double[] cartesianToPolar(double x, double y) {
    // Returns {r, theta} with theta in [0, 2*pi), quadrant-corrected since
    // Math.atan only covers (-pi/2, pi/2).
    // (Merge residue left duplicated old/new line pairs; keep one of each.)
    if (x == 0) {
        // On the y-axis atan(y/x) is undefined; pick pi/2 or 3*pi/2 directly.
        if (y > 0) {
            return new double[]{Math.abs(y), Math.PI / 2};
        } else {
            return new double[]{Math.abs(y), (3 * Math.PI) / 2};
        }
    }
    double r = Math.sqrt(x * x + y * y);
    // Left half-plane: shift atan's result by pi.
    if (x < 0) return new double[]{r, Math.PI + Math.atan(y / x)};
    if (y > 0) {
        return new double[]{r, Math.atan(y / x)};
    } else {
        // Fourth quadrant: wrap the negative atan result up into [0, 2*pi).
        return new double[]{r, (2 * Math.PI) + Math.atan(y / x)};
    }
}
@ -182,4 +194,10 @@ public class Point {
public Point copy() {
    // Produce an independent Point at the same Cartesian location.
    double copiedX = getX();
    double copiedY = getY();
    return new Point(copiedX, copiedY, CARTESIAN);
}
@NonNull
@Override
public String toString() {
    // Render as "(x, y)" using the Cartesian accessors.
    StringBuilder rendered = new StringBuilder("(");
    rendered.append(getX()).append(", ").append(getY()).append(")");
    return rendered.toString();
}
}

View File

@ -30,6 +30,11 @@ public class FollowerConstants {
public static String rightFrontMotorName = FRONT_RIGHT_MOTOR;
public static String rightRearMotorName = BACK_RIGHT_MOTOR;
public static DcMotorSimple.Direction leftFrontMotorDirection = LEFT_FRONT_MOTOR_DIRECTION;
public static DcMotorSimple.Direction rightFrontMotorDirection = FRONT_RIGHT_MOTOR_DIRECTION;
public static DcMotorSimple.Direction leftRearMotorDirection = REAR_LEFT_MOTOR_DIRECTION;
public static DcMotorSimple.Direction rightRearMotorDirection = REAR_RIGHT_MOTOR_DIRECTION;
// This section is for setting the actual drive vector for the front left wheel, if the robot
// is facing a heading of 0 radians with the wheel centered at (0,0)
private static final double xMovement = ROBOT_SPEED_FORWARD;
@ -184,7 +189,7 @@ public class FollowerConstants {
// the limit at which the heading PIDF switches between the main and secondary heading PIDFs
// (Merge residue left the field declared twice, which would not compile; keep one.)
public static double headingPIDFSwitch = Math.PI / 20;
// Secondary heading error PIDF coefficients
public static CustomPIDFCoefficients secondaryHeadingPIDFCoefficients = new CustomPIDFCoefficients(

View File

@ -4,6 +4,10 @@ import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstan
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftRearMotorName;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightFrontMotorName;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightRearMotorName;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftFrontMotorDirection;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftRearMotorDirection;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightFrontMotorDirection;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightRearMotorDirection;
import com.acmerobotics.dashboard.FtcDashboard;
import com.acmerobotics.dashboard.config.Config;
@ -12,7 +16,6 @@ import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.hardware.DcMotorEx;
import com.qualcomm.robotcore.hardware.DcMotorSimple;
import com.qualcomm.robotcore.hardware.configuration.typecontainers.MotorConfigurationType;
import org.firstinspires.ftc.robotcore.external.Telemetry;
@ -40,7 +43,7 @@ import java.util.List;
* @version 1.0, 3/13/2024
*/
@Config
@Autonomous (name = "Forward Velocity Tuner", group = "Autonomous Pathing Tuning")
@Autonomous(name = "Forward Velocity Tuner", group = "Autonomous Pathing Tuning")
public class ForwardVelocityTuner extends OpMode {
private ArrayList<Double> velocities = new ArrayList<>();
@ -52,7 +55,7 @@ public class ForwardVelocityTuner extends OpMode {
private PoseUpdater poseUpdater;
public static double DISTANCE = 40;
public static double DISTANCE = 48;
public static double RECORD_NUMBER = 10;
private Telemetry telemetryA;
@ -71,10 +74,10 @@ public class ForwardVelocityTuner extends OpMode {
leftRear = hardwareMap.get(DcMotorEx.class, leftRearMotorName);
rightRear = hardwareMap.get(DcMotorEx.class, rightRearMotorName);
rightFront = hardwareMap.get(DcMotorEx.class, rightFrontMotorName);
// TODO: Make sure that this is the direction your motors need to be reversed in.
leftFront.setDirection(DcMotorSimple.Direction.REVERSE);
leftRear.setDirection(DcMotorSimple.Direction.REVERSE);
leftFront.setDirection(leftFrontMotorDirection);
leftRear.setDirection(leftRearMotorDirection);
rightFront.setDirection(rightFrontMotorDirection);
rightRear.setDirection(rightRearMotorDirection);
motors = Arrays.asList(leftFront, leftRear, rightFront, rightRear);
@ -93,13 +96,11 @@ public class ForwardVelocityTuner extends OpMode {
}
telemetryA = new MultipleTelemetry(this.telemetry, FtcDashboard.getInstance().getTelemetry());
// telemetryA.addLine("The robot will run at 1 power until it reaches " + DISTANCE + " inches forward.");
// telemetryA.addLine("Make sure you have enough room, since the robot has inertia after cutting power.");
// telemetryA.addLine("After running the distance, the robot will cut power from the drivetrain and display the forward velocity.");
// telemetryA.addLine("Press CROSS or A on game pad 1 to stop.");
//
//
// telemetryA.update();
telemetryA.addLine("The robot will run at 1 power until it reaches " + DISTANCE + " inches forward.");
telemetryA.addLine("Make sure you have enough room, since the robot has inertia after cutting power.");
telemetryA.addLine("After running the distance, the robot will cut power from the drivetrain and display the forward velocity.");
telemetryA.addLine("Press CROSS or A on game pad 1 to stop.");
telemetryA.update();
}
@ -124,6 +125,10 @@ public class ForwardVelocityTuner extends OpMode {
@Override
public void loop() {
if (gamepad1.cross || gamepad1.a) {
for (DcMotorEx motor : motors) {
motor.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);
motor.setPower(0);
}
requestOpModeStop();
}
@ -140,14 +145,14 @@ public class ForwardVelocityTuner extends OpMode {
velocities.add(currentVelocity);
velocities.remove(0);
}
telemetryA.addData("x", poseUpdater.getPose().getX());
telemetryA.addData("y", poseUpdater.getPose().getY());
telemetryA.addData("heading", poseUpdater.getPose().getHeading());
telemetryA.addData("velo mag", poseUpdater.getVelocity().getMagnitude());
telemetryA.addData("velo ", poseUpdater.getVelocity().getTheta());
telemetryA.update();
} else {
leftFront.setPower(0);
leftRear.setPower(0);
rightFront.setPower(0);
rightRear.setPower(0);
for (DcMotorEx motor : motors) {
motor.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);
}
double average = 0;
for (Double velocity : velocities) {
average += velocity;
@ -155,12 +160,7 @@ public class ForwardVelocityTuner extends OpMode {
average /= (double) velocities.size();
telemetryA.addData("forward velocity:", average);
// telemetryA.addData("x", poseUpdater.getPose().getX());
// telemetryA.addData("y", poseUpdater.getPose().getY());
// telemetryA.addData("heading", poseUpdater.getPose().getHeading());
// telemetryA.addData("velo mag", poseUpdater.getVelocity().getMagnitude());
// telemetryA.addData("velo ", poseUpdater.getVelocity().getTheta());
// telemetryA.update();
telemetryA.update();
}
}
}

View File

@ -4,6 +4,10 @@ import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstan
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftRearMotorName;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightFrontMotorName;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightRearMotorName;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftFrontMotorDirection;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftRearMotorDirection;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightFrontMotorDirection;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightRearMotorDirection;
import com.acmerobotics.dashboard.FtcDashboard;
import com.acmerobotics.dashboard.config.Config;
@ -12,7 +16,6 @@ import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.hardware.DcMotorEx;
import com.qualcomm.robotcore.hardware.DcMotorSimple;
import com.qualcomm.robotcore.hardware.configuration.typecontainers.MotorConfigurationType;
import org.firstinspires.ftc.robotcore.external.Telemetry;
@ -40,7 +43,7 @@ import java.util.List;
* @version 1.0, 3/13/2024
*/
@Config
@Autonomous (name = "Forward Zero Power Acceleration Tuner", group = "Autonomous Pathing Tuning")
@Autonomous(name = "Forward Zero Power Acceleration Tuner", group = "Autonomous Pathing Tuning")
public class ForwardZeroPowerAccelerationTuner extends OpMode {
private ArrayList<Double> accelerations = new ArrayList<>();
@ -74,10 +77,10 @@ public class ForwardZeroPowerAccelerationTuner extends OpMode {
leftRear = hardwareMap.get(DcMotorEx.class, leftRearMotorName);
rightRear = hardwareMap.get(DcMotorEx.class, rightRearMotorName);
rightFront = hardwareMap.get(DcMotorEx.class, rightFrontMotorName);
// TODO: Make sure that this is the direction your motors need to be reversed in.
leftFront.setDirection(DcMotorSimple.Direction.REVERSE);
leftRear.setDirection(DcMotorSimple.Direction.REVERSE);
leftFront.setDirection(leftFrontMotorDirection);
leftRear.setDirection(leftRearMotorDirection);
rightFront.setDirection(rightFrontMotorDirection);
rightRear.setDirection(rightRearMotorDirection);
motors = Arrays.asList(leftFront, leftRear, rightFront, rightRear);
@ -120,6 +123,10 @@ public class ForwardZeroPowerAccelerationTuner extends OpMode {
@Override
public void loop() {
if (gamepad1.cross || gamepad1.a) {
for (DcMotorEx motor : motors) {
motor.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);
motor.setPower(0);
}
requestOpModeStop();
}
@ -149,7 +156,7 @@ public class ForwardZeroPowerAccelerationTuner extends OpMode {
for (Double acceleration : accelerations) {
average += acceleration;
}
average /= (double)accelerations.size();
average /= (double) accelerations.size();
telemetryA.addData("forward zero power acceleration (deceleration):", average);
telemetryA.update();

View File

@ -4,6 +4,10 @@ import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstan
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftRearMotorName;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightFrontMotorName;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightRearMotorName;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftFrontMotorDirection;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftRearMotorDirection;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightFrontMotorDirection;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightRearMotorDirection;
import com.acmerobotics.dashboard.FtcDashboard;
import com.acmerobotics.dashboard.config.Config;
@ -12,7 +16,6 @@ import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.hardware.DcMotorEx;
import com.qualcomm.robotcore.hardware.DcMotorSimple;
import com.qualcomm.robotcore.hardware.configuration.typecontainers.MotorConfigurationType;
import org.firstinspires.ftc.robotcore.external.Telemetry;
@ -40,7 +43,7 @@ import java.util.List;
* @version 1.0, 3/13/2024
*/
@Config
@Autonomous (name = "Lateral Zero Power Acceleration Tuner", group = "Autonomous Pathing Tuning")
@Autonomous(name = "Lateral Zero Power Acceleration Tuner", group = "Autonomous Pathing Tuning")
public class LateralZeroPowerAccelerationTuner extends OpMode {
private ArrayList<Double> accelerations = new ArrayList<>();
@ -52,7 +55,7 @@ public class LateralZeroPowerAccelerationTuner extends OpMode {
private PoseUpdater poseUpdater;
public static double VELOCITY = 50;
public static double VELOCITY = 30;
private double previousVelocity;
@ -74,10 +77,10 @@ public class LateralZeroPowerAccelerationTuner extends OpMode {
leftRear = hardwareMap.get(DcMotorEx.class, leftRearMotorName);
rightRear = hardwareMap.get(DcMotorEx.class, rightRearMotorName);
rightFront = hardwareMap.get(DcMotorEx.class, rightFrontMotorName);
// TODO: Make sure that this is the direction your motors need to be reversed in.
leftFront.setDirection(DcMotorSimple.Direction.REVERSE);
leftRear.setDirection(DcMotorSimple.Direction.REVERSE);
leftFront.setDirection(leftFrontMotorDirection);
leftRear.setDirection(leftRearMotorDirection);
rightFront.setDirection(rightFrontMotorDirection);
rightRear.setDirection(rightRearMotorDirection);
motors = Arrays.asList(leftFront, leftRear, rightFront, rightRear);
@ -120,11 +123,15 @@ public class LateralZeroPowerAccelerationTuner extends OpMode {
@Override
public void loop() {
if (gamepad1.cross || gamepad1.a) {
for (DcMotorEx motor : motors) {
motor.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);
motor.setPower(0);
}
requestOpModeStop();
}
poseUpdater.update();
Vector heading = new Vector(1.0, poseUpdater.getPose().getHeading() - Math.PI/2);
Vector heading = new Vector(1.0, poseUpdater.getPose().getHeading() - Math.PI / 2);
if (!end) {
if (!stopping) {
if (MathFunctions.dotProduct(poseUpdater.getVelocity(), heading) > VELOCITY) {
@ -149,7 +156,7 @@ public class LateralZeroPowerAccelerationTuner extends OpMode {
for (Double acceleration : accelerations) {
average += acceleration;
}
average /= (double)accelerations.size();
average /= (double) accelerations.size();
telemetryA.addData("lateral zero power acceleration (deceleration):", average);
telemetryA.update();

View File

@ -4,6 +4,10 @@ import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstan
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftRearMotorName;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightFrontMotorName;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightRearMotorName;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftFrontMotorDirection;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftRearMotorDirection;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightFrontMotorDirection;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightRearMotorDirection;
import com.acmerobotics.dashboard.FtcDashboard;
import com.acmerobotics.dashboard.config.Config;
@ -12,7 +16,6 @@ import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.hardware.DcMotorEx;
import com.qualcomm.robotcore.hardware.DcMotorSimple;
import com.qualcomm.robotcore.hardware.configuration.typecontainers.MotorConfigurationType;
import org.firstinspires.ftc.robotcore.external.Telemetry;
@ -40,7 +43,7 @@ import java.util.List;
* @version 1.0, 3/13/2024
*/
@Config
@Autonomous (name = "Strafe Velocity Tuner", group = "Autonomous Pathing Tuning")
@Autonomous(name = "Strafe Velocity Tuner", group = "Autonomous Pathing Tuning")
public class StrafeVelocityTuner extends OpMode {
private ArrayList<Double> velocities = new ArrayList<>();
@ -52,7 +55,7 @@ public class StrafeVelocityTuner extends OpMode {
private PoseUpdater poseUpdater;
public static double DISTANCE = 40;
public static double DISTANCE = 48;
public static double RECORD_NUMBER = 10;
private Telemetry telemetryA;
@ -71,10 +74,10 @@ public class StrafeVelocityTuner extends OpMode {
leftRear = hardwareMap.get(DcMotorEx.class, leftRearMotorName);
rightRear = hardwareMap.get(DcMotorEx.class, rightRearMotorName);
rightFront = hardwareMap.get(DcMotorEx.class, rightFrontMotorName);
// TODO: Make sure that this is the direction your motors need to be reversed in.
leftFront.setDirection(DcMotorSimple.Direction.REVERSE);
leftRear.setDirection(DcMotorSimple.Direction.REVERSE);
leftFront.setDirection(leftFrontMotorDirection);
leftRear.setDirection(leftRearMotorDirection);
rightFront.setDirection(rightFrontMotorDirection);
rightRear.setDirection(rightRearMotorDirection);
motors = Arrays.asList(leftFront, leftRear, rightFront, rightRear);
@ -120,6 +123,10 @@ public class StrafeVelocityTuner extends OpMode {
@Override
public void loop() {
if (gamepad1.cross || gamepad1.a) {
for (DcMotorEx motor : motors) {
motor.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);
motor.setPower(0);
}
requestOpModeStop();
}
@ -132,11 +139,18 @@ public class StrafeVelocityTuner extends OpMode {
motor.setPower(0);
}
} else {
double currentVelocity = Math.abs(MathFunctions.dotProduct(poseUpdater.getVelocity(), new Vector(1, Math.PI/2)));
double currentVelocity = Math.abs(MathFunctions.dotProduct(poseUpdater.getVelocity(), new Vector(1, Math.PI / 2)));
velocities.add(currentVelocity);
velocities.remove(0);
}
} else {
leftFront.setPower(0);
leftRear.setPower(0);
rightFront.setPower(0);
rightRear.setPower(0);
for (DcMotorEx motor : motors) {
motor.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);
}
double average = 0;
for (Double velocity : velocities) {
average += velocity;

View File

@ -20,11 +20,14 @@ import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector;
* @version 1.0, 4/22/2024
*/
public class Drawing {
public static final double ROBOT_RADIUS = 9;
private static TelemetryPacket packet;
/**
* This draws everything that will be used in the Follower's telemetryDebug() method. This takes
* a Follower as an input, so an instance of the DashbaordDrawingHandler class is not needed.
*
* @param follower
*/
public static void drawDebug(Follower follower) {
@ -35,6 +38,7 @@ public class Drawing {
}
drawPoseHistory(follower.getDashboardPoseTracker(), "#4CAF50");
drawRobot(follower.getPose(), "#4CAF50");
sendPacket();
}
@ -114,12 +118,10 @@ public class Drawing {
* @param t the Point to draw at
*/
public static void drawRobotOnCanvas(Canvas c, Point t) {
final double ROBOT_RADIUS = 9;
c.setStrokeWidth(1);
c.strokeCircle(t.getX(), t.getY(), ROBOT_RADIUS);
Vector halfv = new Vector(0.5*ROBOT_RADIUS, t.getTheta());
Vector halfv = new Vector(0.5 * ROBOT_RADIUS, t.getTheta());
Vector p1 = MathFunctions.addVectors(halfv, new Vector(t.getR(), t.getTheta()));
Vector p2 = MathFunctions.addVectors(p1, halfv);
c.strokeLine(p1.getXComponent(), p1.getYComponent(), p2.getXComponent(), p2.getYComponent());
@ -133,8 +135,6 @@ public class Drawing {
* @param t the Pose to draw at
*/
public static void drawRobotOnCanvas(Canvas c, Pose t) {
final double ROBOT_RADIUS = 9;
c.strokeCircle(t.getX(), t.getY(), ROBOT_RADIUS);
Vector v = t.getHeadingVector();
v.setMagnitude(v.getMagnitude() * ROBOT_RADIUS);

View File

@ -21,7 +21,7 @@ apply plugin: 'com.android.application'
android {
compileSdkVersion 29
compileSdkVersion 30
signingConfigs {
release {
@ -113,10 +113,6 @@ android {
packagingOptions {
pickFirst '**/*.so'
}
sourceSets.main {
jni.srcDirs = []
jniLibs.srcDir rootProject.file('libs')
}
ndkVersion '21.3.6528147'
}

View File

@ -6,14 +6,14 @@ repositories {
}
dependencies {
implementation 'org.firstinspires.ftc:Inspection:10.1.0'
implementation 'org.firstinspires.ftc:Blocks:10.1.0'
implementation 'org.firstinspires.ftc:RobotCore:10.1.0'
implementation 'org.firstinspires.ftc:RobotServer:10.1.0'
implementation 'org.firstinspires.ftc:OnBotJava:10.1.0'
implementation 'org.firstinspires.ftc:Hardware:10.1.0'
implementation 'org.firstinspires.ftc:FtcCommon:10.1.0'
implementation 'org.firstinspires.ftc:Vision:10.1.0'
implementation 'org.firstinspires.ftc:Inspection:10.1.1'
implementation 'org.firstinspires.ftc:Blocks:10.1.1'
implementation 'org.firstinspires.ftc:RobotCore:10.1.1'
implementation 'org.firstinspires.ftc:RobotServer:10.1.1'
implementation 'org.firstinspires.ftc:OnBotJava:10.1.1'
implementation 'org.firstinspires.ftc:Hardware:10.1.1'
implementation 'org.firstinspires.ftc:FtcCommon:10.1.1'
implementation 'org.firstinspires.ftc:Vision:10.1.1'
implementation 'androidx.appcompat:appcompat:1.2.0'
implementation "com.acmerobotics.roadrunner:ftc:0.1.14"

View File

@ -11,7 +11,7 @@ buildscript {
}
dependencies {
// Note for FTC Teams: Do not modify this yourself.
classpath 'com.android.tools.build:gradle:7.2.0'
classpath 'com.android.tools.build:gradle:8.7.0'
}
}

View File

@ -8,3 +8,5 @@ android.enableJetifier=false
# Allow Gradle to use up to 1 GB of RAM
org.gradle.jvmargs=-Xmx1024M
android.nonTransitiveRClass=false

View File

@ -1,5 +1,5 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-7.4.2-bin.zip
distributionUrl=https\://services.gradle.org/distributions/gradle-8.9-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists

0
gradlew vendored Executable file → Normal file
View File