18 Commits

SHA1 Message Date
bee78d7d71 WIP bobito tunings 2024-10-30 19:39:49 -07:00
b442008fb6 Updating localizer/tuning values for Bobito 2024-10-30 12:54:51 -07:00
4e8c37e01b Added motor direction logic 2024-10-22 22:46:20 -07:00
6a125ff247 Added tuned values for Bobito 2024-10-22 22:46:04 -07:00
1c4b3723c3 Added debugging for encoder logic 2024-10-22 22:44:55 -07:00
7a028835f8 Removing unnecessary files for the current iteration of this drive encoded robot 2024-10-22 22:44:24 -07:00
cd10d3a01b Preliminary configuration for Bobito as a DriveEncoder robot 2024-10-22 20:36:02 -07:00
89f4c1b9a0 Merge remote-tracking branch 'origin/branch-rc-chassis-14493' into branch-rc-chassis-14493 2024-10-22 20:26:28 -07:00
6f784936d2 Add Carlos's file 2024-10-22 20:26:15 -07:00
945a77ca49 Aditya's sample code 2024-10-22 17:28:55 -07:00
43c505e292 Updated values back to when they worked 2024-10-22 16:26:33 -07:00
5cec300e58 Upgrade libs to 10.1 2024-10-21 21:56:23 -07:00
00146b2e40 Currently working with Pedro Pathing and tuned as of this date. 2024-10-21 21:54:09 -07:00
ba5e1e6fe4 Committing translational and heading PID 2024-10-20 19:03:52 -07:00
a64a558f2f Commit tune in progress 2024-10-20 17:23:31 -07:00
99099bf78f Changing paths a bit, 3-wheel w/o IMU. This checks out 2024-10-20 17:08:05 -07:00
efd3302645 Outstanding robot 2024-09-26 16:16:10 -07:00
a7f060c3eb Add initial code 2024-09-24 17:10:22 -07:00
19 changed files with 970 additions and 984 deletions

@@ -0,0 +1,122 @@
package org.firstinspires.ftc.teamcode;
import static org.firstinspires.ftc.teamcode.PedroConstants.BACK_LEFT_MOTOR;
import static org.firstinspires.ftc.teamcode.PedroConstants.BACK_RIGHT_MOTOR;
import static org.firstinspires.ftc.teamcode.PedroConstants.FRONT_LEFT_MOTOR;
import static org.firstinspires.ftc.teamcode.PedroConstants.FRONT_RIGHT_MOTOR;
import com.acmerobotics.dashboard.FtcDashboard;
import com.acmerobotics.dashboard.config.Config;
import com.acmerobotics.dashboard.telemetry.MultipleTelemetry;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.hardware.DcMotorEx;
import org.firstinspires.ftc.robotcore.external.Telemetry;
import org.firstinspires.ftc.teamcode.pedroPathing.follower.Follower;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierCurve;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierLine;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathChain;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Point;
/**
* This is the Circle autonomous OpMode. It runs the robot in a PathChain that's actually not quite
* a circle, but some Bezier curves that have control points set essentially in a square. However,
* it turns enough to tune your centripetal force correction and some of your heading. Some lag in
* heading is to be expected.
*
* @author Anyi Lin - 10158 Scott's Bots
* @author Aaron Yang - 10158 Scott's Bots
* @author Harrison Womack - 10158 Scott's Bots
* @version 1.0, 3/12/2024
*/
@Config
@Autonomous(name = "AutoExample", group = "Autonomous Pathing Tuning")
public class AutoExample extends OpMode {
private Telemetry telemetryA;
private Follower follower;
private PathChain path;
private final Pose startPose = new Pose(12.0, 11, 90);
/**
* This initializes the Follower and creates the PathChain for the "circle". Additionally, this
* initializes the FTC Dashboard telemetry.
*/
@Override
public void init() {
follower = new Follower(hardwareMap);
follower.setMaxPower(.6);
follower.setStartingPose(startPose);
path = follower.pathBuilder()
.addPath(
// Line 1
new BezierLine(
new Point(12.804, 11.223, Point.CARTESIAN),
new Point(12.804, 42.362, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 2
new BezierCurve(
new Point(12.804, 42.362, Point.CARTESIAN),
new Point(11.381, 57.379, Point.CARTESIAN),
new Point(31.614, 56.588, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 3
new BezierLine(
new Point(31.614, 56.588, Point.CARTESIAN),
new Point(51.214, 56.746, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 4
new BezierCurve(
new Point(51.214, 56.746, Point.CARTESIAN),
new Point(64.334, 58.643, Point.CARTESIAN),
new Point(61.172, 45.524, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 5
new BezierCurve(
new Point(61.172, 45.524, Point.CARTESIAN),
new Point(36.198, 26.239, Point.CARTESIAN),
new Point(19.759, 11.065, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.build();
follower.followPath(path);
telemetryA = new MultipleTelemetry(this.telemetry, FtcDashboard.getInstance().getTelemetry());
telemetryA.update();
}
/**
* This runs the OpMode, updating the Follower as well as printing out the debug statements to
* the Telemetry, as well as the FTC Dashboard.
*/
@Override
public void loop() {
follower.update();
if (follower.atParametricEnd()) {
follower.followPath(path);
}
follower.telemetryDebug(telemetryA);
}
}

@@ -0,0 +1,142 @@
package org.firstinspires.ftc.teamcode;
import com.acmerobotics.dashboard.FtcDashboard;
import com.acmerobotics.dashboard.config.Config;
import com.acmerobotics.dashboard.telemetry.MultipleTelemetry;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import org.firstinspires.ftc.robotcore.external.Telemetry;
import org.firstinspires.ftc.teamcode.pedroPathing.follower.Follower;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierLine;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathChain;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Point;
/**
* This is the Circle autonomous OpMode. It runs the robot in a PathChain that's actually not quite
* a circle, but some Bezier curves that have control points set essentially in a square. However,
* it turns enough to tune your centripetal force correction and some of your heading. Some lag in
* heading is to be expected.
*
* @author Anyi Lin - 10158 Scott's Bots
* @author Aaron Yang - 10158 Scott's Bots
* @author Harrison Womack - 10158 Scott's Bots
* @version 1.0, 3/12/2024
*/
@Config
@Autonomous(name = "AutoExampleSeason2025V1", group = "Autonomous Pathing Tuning")
public class AutoExampleSeason2025V1 extends OpMode {
private Telemetry telemetryA;
private Follower follower;
private PathChain path;
private final Pose startPose = new Pose(15.0, 35, 90);
/**
* This initializes the Follower and creates the PathChain for the "circle". Additionally, this
* initializes the FTC Dashboard telemetry.
*/
@Override
public void init() {
follower = new Follower(hardwareMap);
follower.setMaxPower(.375);
follower.setStartingPose(startPose);
path = follower.pathBuilder()
.addPath(
// Line 1
new BezierLine(
new Point(15.000, 35.000, Point.CARTESIAN),
new Point(60.000, 35.000, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 2
new BezierLine(
new Point(60.000, 35.000, Point.CARTESIAN),
new Point(60.000, 25.000, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 3
new BezierLine(
new Point(60.000, 25.000, Point.CARTESIAN),
new Point(15.000, 25.000, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 4
new BezierLine(
new Point(15.000, 25.000, Point.CARTESIAN),
new Point(60.000, 25.000, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 5
new BezierLine(
new Point(60.000, 25.000, Point.CARTESIAN),
new Point(60.000, 15.000, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 6
new BezierLine(
new Point(60.000, 15.000, Point.CARTESIAN),
new Point(15.000, 15.000, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 7
new BezierLine(
new Point(15.000, 15.000, Point.CARTESIAN),
new Point(60.000, 15.000, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 8
new BezierLine(
new Point(60.000, 15.000, Point.CARTESIAN),
new Point(60.000, 8.000, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 9
new BezierLine(
new Point(60.000, 8.000, Point.CARTESIAN),
new Point(15.000, 8.000, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90)).build();
follower.followPath(path);
telemetryA = new MultipleTelemetry(this.telemetry, FtcDashboard.getInstance().getTelemetry());
telemetryA.update();
}
/**
* This runs the OpMode, updating the Follower as well as printing out the debug statements to
* the Telemetry, as well as the FTC Dashboard.
*/
@Override
public void loop() {
follower.update();
if (follower.atParametricEnd()) {
follower.followPath(path);
}
follower.telemetryDebug(telemetryA);
}
}

@@ -0,0 +1,80 @@
package org.firstinspires.ftc.teamcode;
import com.acmerobotics.dashboard.FtcDashboard;
import com.acmerobotics.dashboard.config.Config;
import com.acmerobotics.dashboard.telemetry.MultipleTelemetry;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import org.firstinspires.ftc.robotcore.external.Telemetry;
import org.firstinspires.ftc.teamcode.pedroPathing.follower.Follower;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierCurve;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierLine;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathChain;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Point;
/**
* This is the Circle autonomous OpMode. It runs the robot in a PathChain that's actually not quite
* a circle, but some Bezier curves that have control points set essentially in a square. However,
* it turns enough to tune your centripetal force correction and some of your heading. Some lag in
* heading is to be expected.
*
* @author Anyi Lin - 10158 Scott's Bots
* @author Aaron Yang - 10158 Scott's Bots
* @author Harrison Womack - 10158 Scott's Bots
* @version 1.0, 3/12/2024
*/
@Config
@Autonomous(name = "AutoExampleThree", group = "Autonomous Pathing Tuning")
public class AutoExampleThree extends OpMode {
private Telemetry telemetryA;
private Follower follower;
private PathChain path;
private final Pose startPose = new Pose(10.0, 40, 90);
/**
* This initializes the Follower and creates the PathChain for the "circle". Additionally, this
* initializes the FTC Dashboard telemetry.
*/
@Override
public void init() {
follower = new Follower(hardwareMap);
follower.setMaxPower(.4);
follower.setStartingPose(startPose);
path = follower.pathBuilder()
.addPath(
// Line 1
new BezierCurve(
new Point(10.000, 20.000, Point.CARTESIAN),
new Point(29.089, 61.232, Point.CARTESIAN),
new Point(48.054, 19.607, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90)).build();
follower.followPath(path);
telemetryA = new MultipleTelemetry(this.telemetry, FtcDashboard.getInstance().getTelemetry());
telemetryA.update();
}
/**
* This runs the OpMode, updating the Follower as well as printing out the debug statements to
* the Telemetry, as well as the FTC Dashboard.
*/
@Override
public void loop() {
follower.update();
if (follower.atParametricEnd()) {
follower.followPath(path);
}
follower.telemetryDebug(telemetryA);
}
}

@@ -0,0 +1,95 @@
package org.firstinspires.ftc.teamcode;
import com.acmerobotics.dashboard.FtcDashboard;
import com.acmerobotics.dashboard.config.Config;
import com.acmerobotics.dashboard.telemetry.MultipleTelemetry;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import org.firstinspires.ftc.robotcore.external.Telemetry;
import org.firstinspires.ftc.teamcode.pedroPathing.follower.Follower;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierCurve;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierLine;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathChain;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Point;
/**
* This is the Circle autonomous OpMode. It runs the robot in a PathChain that's actually not quite
* a circle, but some Bezier curves that have control points set essentially in a square. However,
* it turns enough to tune your centripetal force correction and some of your heading. Some lag in
* heading is to be expected.
*
* @author Anyi Lin - 10158 Scott's Bots
* @author Aaron Yang - 10158 Scott's Bots
* @author Harrison Womack - 10158 Scott's Bots
* @version 1.0, 3/12/2024
*/
@Config
@Autonomous(name = "AutoExampleTwo", group = "Autonomous Pathing Tuning")
public class AutoExampleTwo extends OpMode {
private Telemetry telemetryA;
private Follower follower;
private PathChain path;
private final Pose startPose = new Pose(10.0, 40, 90);
/**
* This initializes the Follower and creates the PathChain for the "circle". Additionally, this
* initializes the FTC Dashboard telemetry.
*/
@Override
public void init() {
follower = new Follower(hardwareMap);
follower.setMaxPower(.4);
follower.setStartingPose(startPose);
path = follower.pathBuilder()
.addPath(
// Line 1
new BezierLine(
new Point(10.000, 40.000, Point.CARTESIAN),
new Point(60.000, 40.000, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 2
new BezierLine(
new Point(60.000, 40.000, Point.CARTESIAN),
new Point(60.000, 25.000, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 3
new BezierLine(
new Point(60.000, 25.000, Point.CARTESIAN),
new Point(10.000, 25.000, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90)).build();
follower.followPath(path);
telemetryA = new MultipleTelemetry(this.telemetry, FtcDashboard.getInstance().getTelemetry());
telemetryA.update();
}
/**
* This runs the OpMode, updating the Follower as well as printing out the debug statements to
* the Telemetry, as well as the FTC Dashboard.
*/
@Override
public void loop() {
follower.update();
if (follower.atParametricEnd()) {
follower.followPath(path);
}
follower.telemetryDebug(telemetryA);
}
}

@@ -0,0 +1,199 @@
/* Copyright (c) 2021 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.teamcode;
import static org.firstinspires.ftc.teamcode.PedroConstants.BACK_LEFT_MOTOR;
import static org.firstinspires.ftc.teamcode.PedroConstants.BACK_LEFT_MOTOR_DIRECTION;
import static org.firstinspires.ftc.teamcode.PedroConstants.BACK_RIGHT_MOTOR;
import static org.firstinspires.ftc.teamcode.PedroConstants.BACK_RIGHT_MOTOR_DIRECTION;
import static org.firstinspires.ftc.teamcode.PedroConstants.FRONT_LEFT_MOTOR;
import static org.firstinspires.ftc.teamcode.PedroConstants.FRONT_LEFT_MOTOR_DIRECTION;
import static org.firstinspires.ftc.teamcode.PedroConstants.FRONT_RIGHT_MOTOR;
import static org.firstinspires.ftc.teamcode.PedroConstants.FRONT_RIGHT_MOTOR_DIRECTION;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.hardware.DcMotorEx;
import com.qualcomm.robotcore.util.ElapsedTime;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Encoder;
/*
* This file contains an example of a Linear "OpMode".
* An OpMode is a 'program' that runs in either the autonomous or the teleop period of an FTC match.
* The names of OpModes appear on the menu of the FTC Driver Station.
* When a selection is made from the menu, the corresponding OpMode is executed.
*
* This particular OpMode illustrates driving a 4-motor Omni-Directional (or Holonomic) robot.
* This code will work with either a Mecanum-Drive or an X-Drive train.
* Both of these drives are illustrated at https://gm0.org/en/latest/docs/robot-design/drivetrains/holonomic.html
* Note that a Mecanum drive must display an X roller-pattern when viewed from above.
*
* Also note that it is critical to set the correct rotation direction for each motor. See details below.
*
* Holonomic drives provide the ability for the robot to move in three axes (directions) simultaneously.
* Each motion axis is controlled by one Joystick axis.
*
* 1) Axial: Driving forward and backward Left-joystick Forward/Backward
* 2) Lateral: Strafing right and left Left-joystick Right and Left
* 3) Yaw: Rotating Clockwise and counter clockwise Right-joystick Right and Left
*
* This code is written assuming that the right-side motors need to be reversed for the robot to drive forward.
* When you first test your robot, if it moves backward when you push the left stick forward, then you must flip
* the direction of all 4 motors (see code below).
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list
*/
@TeleOp(name = "Basic: Omni Linear OpMode", group = "Linear OpMode")
public class BasicOmniOpMode_Linear extends LinearOpMode {
// Declare OpMode members for each of the 4 motors.
private final ElapsedTime runtime = new ElapsedTime();
private Encoder leftFront;
private Encoder rightFront;
private Encoder leftRear;
private Encoder rightRear;
@Override
public void runOpMode() {
// Initialize the hardware variables. Note that the strings used here must correspond
// to the names assigned during the robot configuration step on the DS or RC devices.
DcMotor leftFrontDrive = hardwareMap.get(DcMotor.class, FRONT_LEFT_MOTOR);
DcMotor leftBackDrive = hardwareMap.get(DcMotor.class, BACK_LEFT_MOTOR);
DcMotor rightFrontDrive = hardwareMap.get(DcMotor.class, FRONT_RIGHT_MOTOR);
DcMotor rightBackDrive = hardwareMap.get(DcMotor.class, BACK_RIGHT_MOTOR);
leftFront = new Encoder(hardwareMap.get(DcMotorEx.class, FRONT_LEFT_MOTOR));
leftRear = new Encoder(hardwareMap.get(DcMotorEx.class, BACK_LEFT_MOTOR));
rightRear = new Encoder(hardwareMap.get(DcMotorEx.class, BACK_RIGHT_MOTOR));
rightFront = new Encoder(hardwareMap.get(DcMotorEx.class, FRONT_RIGHT_MOTOR));
// ########################################################################################
// !!! IMPORTANT Drive Information. Test your motor directions. !!!!!
// ########################################################################################
// Most robots need the motors on one side to be reversed to drive forward.
// The motor reversals shown here are for a "direct drive" robot (the wheels turn the same direction as the motor shaft)
// If your robot has additional gear reductions or uses a right-angled drive, it's important to ensure
// that your motors are turning in the correct direction. So, start out with the reversals here, BUT
// when you first test your robot, push the left joystick forward and observe the direction the wheels turn.
// Reverse the direction (flip FORWARD <-> REVERSE ) of any wheel that runs backward
// Keep testing until ALL the wheels move the robot forward when you push the left joystick forward.
leftFrontDrive.setDirection(FRONT_LEFT_MOTOR_DIRECTION);
leftBackDrive.setDirection(BACK_LEFT_MOTOR_DIRECTION);
rightFrontDrive.setDirection(FRONT_RIGHT_MOTOR_DIRECTION);
rightBackDrive.setDirection(BACK_RIGHT_MOTOR_DIRECTION);
leftFrontDrive.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
leftBackDrive.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
rightFrontDrive.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
rightBackDrive.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
leftFrontDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
leftBackDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
rightFrontDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
rightBackDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
// Wait for the game to start (driver presses START)
telemetry.addData("Status", "Initialized");
telemetry.update();
waitForStart();
runtime.reset();
// run until the end of the match (driver presses STOP)
while (opModeIsActive()) {
double max;
// POV Mode uses left joystick to go forward & strafe, and right joystick to rotate.
double axial = -gamepad1.left_stick_y; // Note: pushing stick forward gives negative value
double lateral = gamepad1.left_stick_x;
double yaw = gamepad1.right_stick_x;
// Combine the joystick requests for each axis-motion to determine each wheel's power.
// Set up a variable for each drive wheel to save the power level for telemetry.
double leftFrontPower = axial + lateral + yaw;
double rightFrontPower = axial - lateral - yaw;
double leftBackPower = axial - lateral + yaw;
double rightBackPower = axial + lateral - yaw;
// Normalize the values so no wheel power exceeds 100%
// This ensures that the robot maintains the desired motion.
max = Math.max(Math.abs(leftFrontPower), Math.abs(rightFrontPower));
max = Math.max(max, Math.abs(leftBackPower));
max = Math.max(max, Math.abs(rightBackPower));
if (max > 1.0) {
leftFrontPower /= max;
rightFrontPower /= max;
leftBackPower /= max;
rightBackPower /= max;
}
// This is test code:
//
// Uncomment the following code to test your motor directions.
// Each button should make the corresponding motor run FORWARD.
// 1) First get all the motors to take to correct positions on the robot
// by adjusting your Robot Configuration if necessary.
// 2) Then make sure they run in the correct direction by modifying the
// the setDirection() calls above.
// Once the correct motors move in the correct direction re-comment this code.
/*
leftFrontPower = gamepad1.x ? 1.0 : 0.0; // X gamepad
leftBackPower = gamepad1.a ? 1.0 : 0.0; // A gamepad
rightFrontPower = gamepad1.y ? 1.0 : 0.0; // Y gamepad
rightBackPower = gamepad1.b ? 1.0 : 0.0; // B gamepad
*/
// Send calculated power to wheels
leftFrontDrive.setPower(leftFrontPower);
rightFrontDrive.setPower(rightFrontPower);
leftBackDrive.setPower(leftBackPower);
rightBackDrive.setPower(rightBackPower);
// Show the elapsed game time and wheel power.
telemetry.addData("Status", "Run Time: " + runtime);
telemetry.addData("Front left/Right", "%4.2f, %4.2f", leftFrontPower, rightFrontPower);
telemetry.addData("Back left/Right", "%4.2f, %4.2f", leftBackPower, rightBackPower);
telemetry.addData("Encoder Front Left", leftFrontDrive.getDirection() + " : " + leftFrontDrive.getCurrentPosition());
telemetry.addData("Encoder Front Right", rightFrontDrive.getCurrentPosition());
telemetry.addData("Encoder Back Left", leftBackDrive.getCurrentPosition());
telemetry.addData("Encoder Back Right", rightBackDrive.getCurrentPosition());
telemetry.update();
}
}
}

@@ -0,0 +1,247 @@
package org.firstinspires.ftc.teamcode;
import com.acmerobotics.dashboard.FtcDashboard;
import com.acmerobotics.dashboard.config.Config;
import com.acmerobotics.dashboard.telemetry.MultipleTelemetry;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import org.firstinspires.ftc.robotcore.external.Telemetry;
import org.firstinspires.ftc.teamcode.pedroPathing.follower.Follower;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierCurve;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierLine;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathChain;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Point;
/**
* This is the Circle autonomous OpMode. It runs the robot in a PathChain that's actually not quite
* a circle, but some Bezier curves that have control points set essentially in a square. However,
* it turns enough to tune your centripetal force correction and some of your heading. Some lag in
* heading is to be expected.
*
* @author Anyi Lin - 10158 Scott's Bots
* @author Aaron Yang - 10158 Scott's Bots
* @author Harrison Womack - 10158 Scott's Bots
* @version 1.0, 3/12/2024
*/
@Config
@Autonomous(name = "BluebAutoV1", group = "Autonomous Pathing Tuning")
public class BluebAutoV1 extends OpMode {
private Telemetry telemetryA;
private Follower follower;
private PathChain path;
private final Pose startPose = new Pose(7.5, 72, 90);
/**
* This initializes the Follower and creates the PathChain for the "circle". Additionally, this
* initializes the FTC Dashboard telemetry.
*/
@Override
public void init() {
follower = new Follower(hardwareMap);
follower.setMaxPower(.4);
follower.setStartingPose(startPose);
path = follower.pathBuilder()
.addPath(
// Line 1
new BezierLine(
new Point(7.5, 72, Point.CARTESIAN),
new Point(29.893, 38.250, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 2
new BezierLine(
new Point(29.893, 38.250, Point.CARTESIAN),
new Point(65.250, 32.143, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 3
new BezierLine(
new Point(65.250, 32.143, Point.CARTESIAN),
new Point(61.714, 24.429, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 4
new BezierLine(
new Point(61.714, 24.429, Point.CARTESIAN),
new Point(13.821, 22.821, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 5
new BezierLine(
new Point(13.821, 22.821, Point.CARTESIAN),
new Point(61.714, 24.429, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 6
new BezierLine(
new Point(61.714, 24.429, Point.CARTESIAN),
new Point(60.750, 12.696, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 7
new BezierLine(
new Point(60.750, 12.696, Point.CARTESIAN),
new Point(12.375, 13.179, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 8
new BezierLine(
new Point(12.375, 13.179, Point.CARTESIAN),
new Point(60.750, 12.536, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 9
new BezierLine(
new Point(60.750, 12.536, Point.CARTESIAN),
new Point(60.589, 9.321, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 10
new BezierLine(
new Point(60.589, 9.321, Point.CARTESIAN),
new Point(12.536, 8.357, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 11
new BezierLine(
new Point(12.536, 8.357, Point.CARTESIAN),
new Point(26.679, 8.679, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 12
new BezierLine(
new Point(26.679, 8.679, Point.CARTESIAN),
new Point(22.821, 109.446, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 13
new BezierLine(
new Point(22.821, 109.446, Point.CARTESIAN),
new Point(70.714, 109.446, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 14
new BezierLine(
new Point(70.714, 109.446, Point.CARTESIAN),
new Point(71.036, 120.214, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 15
new BezierLine(
new Point(71.036, 120.214, Point.CARTESIAN),
new Point(22.179, 120.214, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 16
new BezierLine(
new Point(22.179, 120.214, Point.CARTESIAN),
new Point(11.089, 130.821, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 17
new BezierLine(
new Point(11.089, 130.821, Point.CARTESIAN),
new Point(70.714, 112.018, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 18
new BezierLine(
new Point(70.714, 112.018, Point.CARTESIAN),
new Point(70.714, 128.250, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 19
new BezierLine(
new Point(70.714, 128.250, Point.CARTESIAN),
new Point(9.964, 130.018, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 20
new BezierLine(
new Point(9.964, 130.018, Point.CARTESIAN),
new Point(70.554, 130.500, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 21
new BezierLine(
new Point(70.554, 130.500, Point.CARTESIAN),
new Point(70.393, 135.000, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 22
new BezierLine(
new Point(70.393, 135.000, Point.CARTESIAN),
new Point(13.821, 134.839, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90)).build();
follower.followPath(path);
telemetryA = new MultipleTelemetry(this.telemetry, FtcDashboard.getInstance().getTelemetry());
telemetryA.update();
}
/**
* This runs the OpMode, updating the Follower as well as printing out the debug statements to
* the Telemetry, as well as the FTC Dashboard.
*/
@Override
public void loop() {
follower.update();
if (follower.atParametricEnd()) {
follower.followPath(path);
}
follower.telemetryDebug(telemetryA);
}
}

@@ -0,0 +1,4 @@
package org.firstinspires.ftc.teamcode;
public class BluenbAutov1 {
}

@@ -20,49 +20,36 @@ public class PedroConstants {
// Robot motor direction
public static final Direction FRONT_LEFT_MOTOR_DIRECTION = Direction.REVERSE;
public static final Direction BACK_LEFT_MOTOR_DIRECTION = Direction.REVERSE;
public static final Direction FRONT_RIGHT_MOTOR_DIRECTION = Direction.FORWARD;
public static final Direction BACK_RIGHT_MOTOR_DIRECTION = Direction.FORWARD;
// Robot IMU configuration
public static final String IMU = "imu";
// Robot IMU placement
public static final RevHubOrientationOnRobot.LogoFacingDirection IMU_LOGO_FACING_DIRECTION
= RevHubOrientationOnRobot.LogoFacingDirection.DOWN;
public static final RevHubOrientationOnRobot.UsbFacingDirection IMU_USB_FACING_DIRECTION
= RevHubOrientationOnRobot.UsbFacingDirection.LEFT;
// Robot encoders
public static final String LEFT_ENCODER = "encoder left";
public static final String RIGHT_ENCODER = "encoder right";
public static final String BACK_ENCODER = "encoder back";
public static final Direction FRONT_RIGHT_MOTOR_DIRECTION = Direction.REVERSE;
public static final Direction BACK_RIGHT_MOTOR_DIRECTION = Direction.REVERSE;
// Robot encoder direction
public static final double LEFT_ENCODER_DIRECTION = Encoder.FORWARD;
public static final double RIGHT_ENCODER_DIRECTION = Encoder.FORWARD;
public static final double BACK_ENCODER_DIRECTION = Encoder.REVERSE;
public static final double FRONT_LEFT_MOTOR_ENCODER = Encoder.FORWARD;
public static final double BACK_LEFT_MOTOR_ENCODER = Encoder.FORWARD;
public static final double FRONT_RIGHT_MOTOR_ENCODER = Encoder.FORWARD;
public static final double BACK_RIGHT_MOTOR_ENCODER = Encoder.FORWARD;
/*
Pedro's parameters
*/
// The weight of the robot in Kilograms
public static final double ROBOT_WEIGHT_IN_KG = 10.5;
public static final double ROBOT_WEIGHT_IN_KG = 5.15;
// Maximum velocity of the robot going forward
public static final double ROBOT_SPEED_FORWARD = 72.0693;
public static final double ROBOT_SPEED_FORWARD = 66.6117;
// Maximum velocity of the robot going right
public static final double ROBOT_SPEED_LATERAL = 24.1401;
public static final double ROBOT_SPEED_LATERAL = 60.0671;
// Rate of deceleration when power is cut-off when the robot is moving forward
public static final double FORWARD_ZERO_POWER_ACCEL = -74.3779;
public static final double FORWARD_ZERO_POWER_ACCEL = -71.154;
// Rate of deceleration when power is cut-off when the robot is moving to the right
public static final double LATERAL_ZERO_POWER_ACCEL = -111.8409;
public static final double LATERAL_ZERO_POWER_ACCEL = -109.5358;
// Determines how fast your robot will decelerate as a factor of how fast your robot will coast to a stop
public static final double ZERO_POWER_ACCEL_MULT = 4;
public static final double ZERO_POWER_ACCEL_MULT = 4.0;
/* Centripetal force correction - increase if robot is correcting into the path
- decrease if robot is correcting away from the path */
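
For intuition about the zero-power deceleration constants above, here is a rough kinematic sketch (an editor's illustration, not code from this repo; the follower's actual braking behavior also involves ZERO_POWER_ACCEL_MULT and the drive PID):

// Editor's illustration: approximate coasting distance from the tuned values above,
// using the constant-deceleration estimate d = v^2 / (2 * |a|).
class CoastDistanceSketch {
    public static void main(String[] args) {
        double vForward = 66.6117;  // ROBOT_SPEED_FORWARD, inches per second
        double aForward = -71.154;  // FORWARD_ZERO_POWER_ACCEL, inches per second^2
        double coastInches = (vForward * vForward) / (2 * Math.abs(aForward));
        System.out.printf("Approx. forward coasting distance: %.1f in%n", coastInches); // roughly 31 in
    }
}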

@@ -71,7 +71,8 @@ measurements will be in centimeters.
of how fast your robot will coast to a stop. Honestly, this is up to you. I personally used 4, but
what works best for you is most important. Higher numbers will cause a faster brake, but increase
oscillations at the end. Lower numbers will do the opposite. This can be found on line `107` in
`FollowerConstants`, named `zeroPowerAccelerationMultiplier`. The drive PID is much, much more sensitive than the others. For reference,
`FollowerConstants`, named `zeroPowerAccelerationMultiplier`. The drive PID is much, much more
* sensitive than the others. For reference,
my P values were in the hundredths and thousandths place values, and my D values were in the hundred
thousandths and millionths place values. To tune this, enable `useDrive`, `useHeading`, and
`useTranslational` in the `Follower` dropdown in FTC Dashboard. Next, run `StraightBackAndForth`
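
As a rough illustration of the magnitudes this paragraph describes, a hedged sketch of what the relevant tuning fields might look like (only `zeroPowerAccelerationMultiplier` is named in the text; the other field names and values below are placeholders, not the library's actual API):

// Hypothetical FollowerConstants-style fields, shown for scale only.
class TuningScaleSketch {
    public static double zeroPowerAccelerationMultiplier = 4;  // higher = harder braking, more oscillation at the path end
    public static double driveP = 0.01;      // drive PID P term: hundredths/thousandths range (example magnitude)
    public static double driveD = 0.000005;  // drive PID D term: hundred-thousandths/millionths range (example magnitude)
}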

@@ -5,9 +5,7 @@ import com.qualcomm.robotcore.hardware.HardwareMap;
import com.qualcomm.robotcore.hardware.IMU;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.localizers.ThreeWheelIMULocalizer;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.localizers.ThreeWheelLocalizer;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.localizers.TwoWheelLocalizer;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.localizers.DriveEncoderLocalizer;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.MathFunctions;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector;
@@ -69,7 +67,7 @@ public class PoseUpdater {
*/
public PoseUpdater(HardwareMap hardwareMap) {
// TODO: replace the second argument with your preferred localizer
this(hardwareMap, new ThreeWheelIMULocalizer(hardwareMap));
this(hardwareMap, new DriveEncoderLocalizer(hardwareMap));
}
/**

@@ -1,11 +1,20 @@
package org.firstinspires.ftc.teamcode.pedroPathing.localization.localizers;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftFrontMotorName;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftRearMotorName;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightFrontMotorName;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightRearMotorName;
import static org.firstinspires.ftc.teamcode.PedroConstants.BACK_LEFT_MOTOR;
import static org.firstinspires.ftc.teamcode.PedroConstants.BACK_LEFT_MOTOR_DIRECTION;
import static org.firstinspires.ftc.teamcode.PedroConstants.BACK_LEFT_MOTOR_ENCODER;
import static org.firstinspires.ftc.teamcode.PedroConstants.BACK_RIGHT_MOTOR;
import static org.firstinspires.ftc.teamcode.PedroConstants.BACK_RIGHT_MOTOR_DIRECTION;
import static org.firstinspires.ftc.teamcode.PedroConstants.BACK_RIGHT_MOTOR_ENCODER;
import static org.firstinspires.ftc.teamcode.PedroConstants.FRONT_LEFT_MOTOR;
import static org.firstinspires.ftc.teamcode.PedroConstants.FRONT_LEFT_MOTOR_DIRECTION;
import static org.firstinspires.ftc.teamcode.PedroConstants.FRONT_LEFT_MOTOR_ENCODER;
import static org.firstinspires.ftc.teamcode.PedroConstants.FRONT_RIGHT_MOTOR;
import static org.firstinspires.ftc.teamcode.PedroConstants.FRONT_RIGHT_MOTOR_DIRECTION;
import static org.firstinspires.ftc.teamcode.PedroConstants.FRONT_RIGHT_MOTOR_ENCODER;
import com.acmerobotics.dashboard.config.Config;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.hardware.DcMotorEx;
import com.qualcomm.robotcore.hardware.HardwareMap;
@@ -26,21 +35,21 @@ import org.firstinspires.ftc.teamcode.pedroPathing.util.NanoTimer;
*/
@Config
public class DriveEncoderLocalizer extends Localizer {
private HardwareMap hardwareMap;
private final HardwareMap hardwareMap;
private Pose startPose;
private Pose displacementPose;
private Pose currentVelocity;
private Matrix prevRotationMatrix;
private NanoTimer timer;
private final NanoTimer timer;
private long deltaTimeNano;
private Encoder leftFront;
private Encoder rightFront;
private Encoder leftRear;
private Encoder rightRear;
private final Encoder leftFront;
private final Encoder rightFront;
private final Encoder leftRear;
private final Encoder rightRear;
private double totalHeading;
public static double FORWARD_TICKS_TO_INCHES = 1;
public static double STRAFE_TICKS_TO_INCHES = 1;
public static double TURN_TICKS_TO_RADIANS = 1;
public static double FORWARD_TICKS_TO_INCHES = -0.0058;
public static double STRAFE_TICKS_TO_INCHES = -0.0054;
public static double TURN_TICKS_TO_RADIANS = -0.0009;
public static double ROBOT_WIDTH = 1;
public static double ROBOT_LENGTH = 1;
@@ -58,22 +67,32 @@ public class DriveEncoderLocalizer extends Localizer {
* This creates a new DriveEncoderLocalizer from a HardwareMap and a Pose, with the Pose
* specifying the starting pose of the localizer.
*
* @param map the HardwareMap
* @param map the HardwareMap
* @param setStartPose the Pose to start from
*/
public DriveEncoderLocalizer(HardwareMap map, Pose setStartPose) {
hardwareMap = map;
leftFront = new Encoder(hardwareMap.get(DcMotorEx.class, leftFrontMotorName));
leftRear = new Encoder(hardwareMap.get(DcMotorEx.class, leftRearMotorName));
rightRear = new Encoder(hardwareMap.get(DcMotorEx.class, rightRearMotorName));
rightFront = new Encoder(hardwareMap.get(DcMotorEx.class, rightFrontMotorName));
leftFront = new Encoder(hardwareMap.get(DcMotorEx.class, FRONT_LEFT_MOTOR));
leftRear = new Encoder(hardwareMap.get(DcMotorEx.class, BACK_LEFT_MOTOR));
rightRear = new Encoder(hardwareMap.get(DcMotorEx.class, BACK_RIGHT_MOTOR));
rightFront = new Encoder(hardwareMap.get(DcMotorEx.class, FRONT_RIGHT_MOTOR));
DcMotor leftFrontDrive = hardwareMap.get(DcMotor.class, FRONT_LEFT_MOTOR);
DcMotor leftBackDrive = hardwareMap.get(DcMotor.class, BACK_LEFT_MOTOR);
DcMotor rightFrontDrive = hardwareMap.get(DcMotor.class, FRONT_RIGHT_MOTOR);
DcMotor rightBackDrive = hardwareMap.get(DcMotor.class, BACK_RIGHT_MOTOR);
leftFrontDrive.setDirection(FRONT_LEFT_MOTOR_DIRECTION);
leftBackDrive.setDirection(BACK_LEFT_MOTOR_DIRECTION);
rightFrontDrive.setDirection(FRONT_RIGHT_MOTOR_DIRECTION);
rightBackDrive.setDirection(BACK_RIGHT_MOTOR_DIRECTION);
// TODO: reverse any encoders necessary
leftFront.setDirection(Encoder.REVERSE);
rightRear.setDirection(Encoder.REVERSE);
leftRear.setDirection(Encoder.FORWARD);
rightRear.setDirection(Encoder.FORWARD);
leftFront.setDirection(FRONT_LEFT_MOTOR_ENCODER);
rightFront.setDirection(FRONT_RIGHT_MOTOR_ENCODER);
leftRear.setDirection(BACK_LEFT_MOTOR_ENCODER);
rightRear.setDirection(BACK_RIGHT_MOTOR_ENCODER);
setStartPose(setStartPose);
timer = new NanoTimer();
@@ -129,7 +148,7 @@ public class DriveEncoderLocalizer extends Localizer {
* @param heading the rotation of the Matrix
*/
public void setPrevRotationMatrix(double heading) {
prevRotationMatrix = new Matrix(3,3);
prevRotationMatrix = new Matrix(3, 3);
prevRotationMatrix.set(0, 0, Math.cos(heading));
prevRotationMatrix.set(0, 1, -Math.sin(heading));
prevRotationMatrix.set(1, 0, Math.sin(heading));
@@ -164,7 +183,7 @@ public class DriveEncoderLocalizer extends Localizer {
Matrix globalDeltas;
setPrevRotationMatrix(getPose().getHeading());
Matrix transformation = new Matrix(3,3);
Matrix transformation = new Matrix(3, 3);
if (Math.abs(robotDeltas.get(2, 0)) < 0.001) {
transformation.set(0, 0, 1.0 - (Math.pow(robotDeltas.get(2, 0), 2) / 6.0));
transformation.set(0, 1, -robotDeltas.get(2, 0) / 2.0);
@@ -214,13 +233,13 @@ public class DriveEncoderLocalizer extends Localizer {
* @return returns a Matrix containing the robot relative movement.
*/
public Matrix getRobotDeltas() {
Matrix returnMatrix = new Matrix(3,1);
Matrix returnMatrix = new Matrix(3, 1);
// x/forward movement
returnMatrix.set(0,0, FORWARD_TICKS_TO_INCHES * (leftFront.getDeltaPosition() + rightFront.getDeltaPosition() + leftRear.getDeltaPosition() + rightRear.getDeltaPosition()));
returnMatrix.set(0, 0, FORWARD_TICKS_TO_INCHES * (leftFront.getDeltaPosition() + rightFront.getDeltaPosition() + leftRear.getDeltaPosition() + rightRear.getDeltaPosition()));
//y/strafe movement
returnMatrix.set(1,0, STRAFE_TICKS_TO_INCHES * (-leftFront.getDeltaPosition() + rightFront.getDeltaPosition() + leftRear.getDeltaPosition() - rightRear.getDeltaPosition()));
returnMatrix.set(1, 0, STRAFE_TICKS_TO_INCHES * (-leftFront.getDeltaPosition() + rightFront.getDeltaPosition() + leftRear.getDeltaPosition() - rightRear.getDeltaPosition()));
// theta/turning
returnMatrix.set(2,0, TURN_TICKS_TO_RADIANS * ((-leftFront.getDeltaPosition() + rightFront.getDeltaPosition() - leftRear.getDeltaPosition() + rightRear.getDeltaPosition()) / (ROBOT_WIDTH + ROBOT_LENGTH)));
returnMatrix.set(2, 0, TURN_TICKS_TO_RADIANS * ((-leftFront.getDeltaPosition() + rightFront.getDeltaPosition() - leftRear.getDeltaPosition() + rightRear.getDeltaPosition()) / (ROBOT_WIDTH + ROBOT_LENGTH)));
return returnMatrix;
}

@@ -1,317 +0,0 @@
package org.firstinspires.ftc.teamcode.pedroPathing.localization.localizers;
import static org.firstinspires.ftc.teamcode.PedroConstants.*;
import com.acmerobotics.dashboard.config.Config;
import com.qualcomm.hardware.rev.RevHubOrientationOnRobot;
import com.qualcomm.robotcore.hardware.DcMotorEx;
import com.qualcomm.robotcore.hardware.HardwareMap;
import com.qualcomm.robotcore.hardware.IMU;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Encoder;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Localizer;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Matrix;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.MathFunctions;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector;
import org.firstinspires.ftc.teamcode.pedroPathing.util.NanoTimer;
/**
* This is the ThreeWheelIMULocalizer class. This class extends the Localizer superclass and is a
* localizer that uses the three wheel odometry set up with the IMU to have more accurate heading
* readings. The diagram below, which is modified from Road Runner, shows a typical set up.
*
* The view is from the top of the robot looking downwards.
*
* left on robot is the y positive direction
*
* forward on robot is the x positive direction
*
* /--------------\
* | ____ |
* | ---- |
* | || || |
* | || || | ----> left (y positive)
* | |
* | |
* \--------------/
* |
* |
* V
* forward (x positive)
*
* @author Logan Nash
* @author Anyi Lin - 10158 Scott's Bots
* @version 1.0, 7/9/2024
*/
@Config
public class ThreeWheelIMULocalizer extends Localizer {
private HardwareMap hardwareMap;
private Pose startPose;
private Pose displacementPose;
private Pose currentVelocity;
private Matrix prevRotationMatrix;
private NanoTimer timer;
private long deltaTimeNano;
private Encoder leftEncoder;
private Encoder rightEncoder;
private Encoder strafeEncoder;
private Pose leftEncoderPose;
private Pose rightEncoderPose;
private Pose strafeEncoderPose;
public final IMU imu;
private double previousIMUOrientation;
private double deltaRadians;
private double totalHeading;
public static double FORWARD_TICKS_TO_INCHES = 0.004;//8192 * 1.37795 * 2 * Math.PI * 0.5008239963;
public static double STRAFE_TICKS_TO_INCHES = -0.0036;//8192 * 1.37795 * 2 * Math.PI * 0.5018874659;
public static double TURN_TICKS_TO_RADIANS = 0.0043;//8192 * 1.37795 * 2 * Math.PI * 0.5;
public static boolean useIMU = true;
/**
* This creates a new ThreeWheelIMULocalizer from a HardwareMap, with a starting Pose at (0,0)
* facing 0 heading.
*
* @param map the HardwareMap
*/
public ThreeWheelIMULocalizer(HardwareMap map) {
this(map, new Pose());
}
/**
* This creates a new ThreeWheelIMULocalizer from a HardwareMap and a Pose, with the Pose
* specifying the starting pose of the localizer.
*
* @param map the HardwareMap
* @param setStartPose the Pose to start from
*/
public ThreeWheelIMULocalizer(HardwareMap map, Pose setStartPose) {
hardwareMap = map;
imu = hardwareMap.get(IMU.class, IMU);
// TODO: replace this with your IMU's orientation
imu.initialize(new IMU.Parameters(new RevHubOrientationOnRobot(IMU_LOGO_FACING_DIRECTION, IMU_USB_FACING_DIRECTION)));
// TODO: replace these with your encoder positions
leftEncoderPose = new Pose(-7.625, 6.19375, 0);
rightEncoderPose = new Pose(-7.625, -6.19375, 0);
strafeEncoderPose = new Pose(7, 1, Math.toRadians(90));
// TODO: replace these with your encoder ports
leftEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, LEFT_ENCODER));
rightEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, RIGHT_ENCODER));
strafeEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, BACK_ENCODER));
// TODO: reverse any encoders necessary
leftEncoder.setDirection(LEFT_ENCODER_DIRECTION);
rightEncoder.setDirection(RIGHT_ENCODER_DIRECTION);
strafeEncoder.setDirection(BACK_ENCODER_DIRECTION);
setStartPose(setStartPose);
timer = new NanoTimer();
deltaTimeNano = 1;
displacementPose = new Pose();
currentVelocity = new Pose();
totalHeading = 0;
resetEncoders();
}
/**
* This returns the current pose estimate.
*
* @return returns the current pose estimate as a Pose
*/
@Override
public Pose getPose() {
return MathFunctions.addPoses(startPose, displacementPose);
}
/**
* This returns the current velocity estimate.
*
* @return returns the current velocity estimate as a Pose
*/
@Override
public Pose getVelocity() {
return currentVelocity.copy();
}
/**
* This returns the current velocity estimate.
*
* @return returns the current velocity estimate as a Vector
*/
@Override
public Vector getVelocityVector() {
return currentVelocity.getVector();
}
/**
* This sets the start pose. Changing the start pose should move the robot as if all its
* previous movements were displacing it from its new start pose.
*
* @param setStart the new start pose
*/
@Override
public void setStartPose(Pose setStart) {
startPose = setStart;
}
/**
* This sets the Matrix that contains the previous pose's heading rotation.
*
* @param heading the rotation of the Matrix
*/
public void setPrevRotationMatrix(double heading) {
prevRotationMatrix = new Matrix(3,3);
prevRotationMatrix.set(0, 0, Math.cos(heading));
prevRotationMatrix.set(0, 1, -Math.sin(heading));
prevRotationMatrix.set(1, 0, Math.sin(heading));
prevRotationMatrix.set(1, 1, Math.cos(heading));
prevRotationMatrix.set(2, 2, 1.0);
}
/**
* This sets the current pose estimate. Changing this should just change the robot's current
* pose estimate, not anything to do with the start pose.
*
* @param setPose the new current pose estimate
*/
@Override
public void setPose(Pose setPose) {
displacementPose = MathFunctions.subtractPoses(setPose, startPose);
resetEncoders();
}
/**
* This updates the elapsed time timer that keeps track of time between updates, as well as the
* change position of the Encoders. Then, the robot's global change in position is calculated
* using the pose exponential method.
*/
@Override
public void update() {
deltaTimeNano = timer.getElapsedTime();
timer.resetTimer();
updateEncoders();
Matrix robotDeltas = getRobotDeltas();
Matrix globalDeltas;
setPrevRotationMatrix(getPose().getHeading());
Matrix transformation = new Matrix(3,3);
if (Math.abs(robotDeltas.get(2, 0)) < 0.001) {
transformation.set(0, 0, 1.0 - (Math.pow(robotDeltas.get(2, 0), 2) / 6.0));
transformation.set(0, 1, -robotDeltas.get(2, 0) / 2.0);
transformation.set(1, 0, robotDeltas.get(2, 0) / 2.0);
transformation.set(1, 1, 1.0 - (Math.pow(robotDeltas.get(2, 0), 2) / 6.0));
transformation.set(2, 2, 1.0);
} else {
transformation.set(0, 0, Math.sin(robotDeltas.get(2, 0)) / robotDeltas.get(2, 0));
transformation.set(0, 1, (Math.cos(robotDeltas.get(2, 0)) - 1.0) / robotDeltas.get(2, 0));
transformation.set(1, 0, (1.0 - Math.cos(robotDeltas.get(2, 0))) / robotDeltas.get(2, 0));
transformation.set(1, 1, Math.sin(robotDeltas.get(2, 0)) / robotDeltas.get(2, 0));
transformation.set(2, 2, 1.0);
}
globalDeltas = Matrix.multiply(Matrix.multiply(prevRotationMatrix, transformation), robotDeltas);
displacementPose.add(new Pose(globalDeltas.get(0, 0), globalDeltas.get(1, 0), globalDeltas.get(2, 0)));
currentVelocity = new Pose(globalDeltas.get(0, 0) / (deltaTimeNano * Math.pow(10.0, 9)), globalDeltas.get(1, 0) / (deltaTimeNano * Math.pow(10.0, 9)), globalDeltas.get(2, 0) / (deltaTimeNano * Math.pow(10.0, 9)));
totalHeading += globalDeltas.get(2, 0);
}
/**
* This updates the Encoders.
*/
public void updateEncoders() {
leftEncoder.update();
rightEncoder.update();
strafeEncoder.update();
double currentIMUOrientation = MathFunctions.normalizeAngle(imu.getRobotYawPitchRollAngles().getYaw(AngleUnit.RADIANS));
deltaRadians = MathFunctions.getTurnDirection(previousIMUOrientation, currentIMUOrientation) * MathFunctions.getSmallestAngleDifference(currentIMUOrientation, previousIMUOrientation);
previousIMUOrientation = currentIMUOrientation;
}
/**
* This resets the Encoders.
*/
public void resetEncoders() {
leftEncoder.reset();
rightEncoder.reset();
strafeEncoder.reset();
}
/**
* This calculates the change in position from the perspective of the robot using information
* from the Encoders.
*
* @return returns a Matrix containing the robot relative movement.
*/
public Matrix getRobotDeltas() {
Matrix returnMatrix = new Matrix(3,1);
// x/forward movement
returnMatrix.set(0,0, FORWARD_TICKS_TO_INCHES * ((rightEncoder.getDeltaPosition() * leftEncoderPose.getY() - leftEncoder.getDeltaPosition() * rightEncoderPose.getY()) / (leftEncoderPose.getY() - rightEncoderPose.getY())));
//y/strafe movement
returnMatrix.set(1,0, STRAFE_TICKS_TO_INCHES * (strafeEncoder.getDeltaPosition() - strafeEncoderPose.getX() * ((rightEncoder.getDeltaPosition() - leftEncoder.getDeltaPosition()) / (leftEncoderPose.getY() - rightEncoderPose.getY()))));
// theta/turning
if (MathFunctions.getSmallestAngleDifference(0, deltaRadians) > 0.00005 && useIMU) {
returnMatrix.set(2, 0, deltaRadians);
} else {
returnMatrix.set(2,0, TURN_TICKS_TO_RADIANS * ((rightEncoder.getDeltaPosition() - leftEncoder.getDeltaPosition()) / (leftEncoderPose.getY() - rightEncoderPose.getY())));
}
return returnMatrix;
}
/**
* This returns how far the robot has turned in radians, in a number not clamped between 0 and
* 2 * pi radians. This is used for some tuning things and nothing actually within the following.
*
* @return returns how far the robot has turned in total, in radians.
*/
public double getTotalHeading() {
return totalHeading;
}
/**
* This returns the multiplier applied to forward movement measurement to convert from encoder
* ticks to inches. This is found empirically through a tuner.
*
* @return returns the forward ticks to inches multiplier
*/
public double getForwardMultiplier() {
return FORWARD_TICKS_TO_INCHES;
}
/**
* This returns the multiplier applied to lateral/strafe movement measurement to convert from
* encoder ticks to inches. This is found empirically through a tuner.
*
* @return returns the lateral/strafe ticks to inches multiplier
*/
public double getLateralMultiplier() {
return STRAFE_TICKS_TO_INCHES;
}
/**
* This returns the multiplier applied to turning movement measurement to convert from encoder
* ticks to radians. This is found empirically through a tuner.
*
* @return returns the turning ticks to radians multiplier
*/
public double getTurningMultiplier() {
return TURN_TICKS_TO_RADIANS;
}
/**
* This resets the IMU.
*/
public void resetIMU() {
imu.resetYaw();
}
}

@@ -1,294 +0,0 @@
package org.firstinspires.ftc.teamcode.pedroPathing.localization.localizers;
import static org.firstinspires.ftc.teamcode.PedroConstants.*;
import com.acmerobotics.dashboard.config.Config;
import com.qualcomm.robotcore.hardware.DcMotorEx;
import com.qualcomm.robotcore.hardware.HardwareMap;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Encoder;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Localizer;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Matrix;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.MathFunctions;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector;
import org.firstinspires.ftc.teamcode.pedroPathing.util.NanoTimer;
/**
* This is the ThreeWheelLocalizer class. This class extends the Localizer superclass and is a
* localizer that uses the three wheel odometry set up. The diagram below, which is modified from
* Road Runner, shows a typical set up.
*
* The view is from the top of the robot looking downwards.
*
* left on robot is the y positive direction
*
* forward on robot is the x positive direction
*
* /--------------\
* | ____ |
* | ---- |
* | || || |
* | || || | ----> left (y positive)
* | |
* | |
* \--------------/
* |
* |
* V
* forward (x positive)
*
* @author Anyi Lin - 10158 Scott's Bots
* @version 1.0, 4/2/2024
*/
@Config
public class ThreeWheelLocalizer extends Localizer {
private HardwareMap hardwareMap;
private Pose startPose;
private Pose displacementPose;
private Pose currentVelocity;
private Matrix prevRotationMatrix;
private NanoTimer timer;
private long deltaTimeNano;
private Encoder leftEncoder;
private Encoder rightEncoder;
private Encoder strafeEncoder;
private Pose leftEncoderPose;
private Pose rightEncoderPose;
private Pose strafeEncoderPose;
private double totalHeading;
public static double FORWARD_TICKS_TO_INCHES = 0.00052189;//8192 * 1.37795 * 2 * Math.PI * 0.5008239963;
public static double STRAFE_TICKS_TO_INCHES = 0.00052189;//8192 * 1.37795 * 2 * Math.PI * 0.5018874659;
public static double TURN_TICKS_TO_RADIANS = 0.00053717;//8192 * 1.37795 * 2 * Math.PI * 0.5;
/**
* This creates a new ThreeWheelLocalizer from a HardwareMap, with a starting Pose at (0,0)
* facing 0 heading.
*
* @param map the HardwareMap
*/
public ThreeWheelLocalizer(HardwareMap map) {
this(map, new Pose());
}
/**
* This creates a new ThreeWheelLocalizer from a HardwareMap and a Pose, with the Pose
* specifying the starting pose of the localizer.
*
* @param map the HardwareMap
* @param setStartPose the Pose to start from
*/
public ThreeWheelLocalizer(HardwareMap map, Pose setStartPose) {
// TODO: replace these with your encoder positions
leftEncoderPose = new Pose(-18.5/25.4 - 0.1, 164.4/25.4, 0);
rightEncoderPose = new Pose(-18.4/25.4 - 0.1, -159.6/25.4, 0);
strafeEncoderPose = new Pose(0*(-107.9/25.4+8)+-107.9/25.4+0.25, -1.1/25.4-0.23, Math.toRadians(90));
hardwareMap = map;
// TODO: replace these with your encoder ports
leftEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, LEFT_ENCODER));
rightEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, RIGHT_ENCODER));
strafeEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, BACK_ENCODER));
// TODO: reverse any encoders necessary
//leftEncoder.setDirection(Encoder.REVERSE);
// rightEncoder.setDirection(Encoder.REVERSE);
//strafeEncoder.setDirection(Encoder.FORWARD);
setStartPose(setStartPose);
timer = new NanoTimer();
deltaTimeNano = 1;
displacementPose = new Pose();
currentVelocity = new Pose();
totalHeading = 0;
resetEncoders();
}
/**
* This returns the current pose estimate.
*
* @return returns the current pose estimate as a Pose
*/
@Override
public Pose getPose() {
return MathFunctions.addPoses(startPose, displacementPose);
}
/**
* This returns the current velocity estimate.
*
* @return returns the current velocity estimate as a Pose
*/
@Override
public Pose getVelocity() {
return currentVelocity.copy();
}
/**
* This returns the current velocity estimate.
*
* @return returns the current velocity estimate as a Vector
*/
@Override
public Vector getVelocityVector() {
return currentVelocity.getVector();
}
/**
* This sets the start pose. Changing the start pose should move the robot as if all its
* previous movements were displacing it from its new start pose.
*
* @param setStart the new start pose
*/
@Override
public void setStartPose(Pose setStart) {
startPose = setStart;
}
/**
* This sets the Matrix that contains the previous pose's heading rotation.
*
* @param heading the rotation of the Matrix
*/
public void setPrevRotationMatrix(double heading) {
prevRotationMatrix = new Matrix(3,3);
prevRotationMatrix.set(0, 0, Math.cos(heading));
prevRotationMatrix.set(0, 1, -Math.sin(heading));
prevRotationMatrix.set(1, 0, Math.sin(heading));
prevRotationMatrix.set(1, 1, Math.cos(heading));
prevRotationMatrix.set(2, 2, 1.0);
}
/**
* This sets the current pose estimate. Changing this should only change the robot's current
* pose estimate; it does not affect the start pose.
*
* @param setPose the new current pose estimate
*/
@Override
public void setPose(Pose setPose) {
displacementPose = MathFunctions.subtractPoses(setPose, startPose);
resetEncoders();
}
/**
* This updates the elapsed time timer that keeps track of time between updates, as well as the
* change in position of the Encoders. Then, the robot's global change in position is calculated
* using the pose exponential method (sketched informally after this method).
*/
@Override
public void update() {
deltaTimeNano = timer.getElapsedTime();
timer.resetTimer();
updateEncoders();
Matrix robotDeltas = getRobotDeltas();
Matrix globalDeltas;
setPrevRotationMatrix(getPose().getHeading());
Matrix transformation = new Matrix(3,3);
if (Math.abs(robotDeltas.get(2, 0)) < 0.001) {
transformation.set(0, 0, 1.0 - (Math.pow(robotDeltas.get(2, 0), 2) / 6.0));
transformation.set(0, 1, -robotDeltas.get(2, 0) / 2.0);
transformation.set(1, 0, robotDeltas.get(2, 0) / 2.0);
transformation.set(1, 1, 1.0 - (Math.pow(robotDeltas.get(2, 0), 2) / 6.0));
transformation.set(2, 2, 1.0);
} else {
transformation.set(0, 0, Math.sin(robotDeltas.get(2, 0)) / robotDeltas.get(2, 0));
transformation.set(0, 1, (Math.cos(robotDeltas.get(2, 0)) - 1.0) / robotDeltas.get(2, 0));
transformation.set(1, 0, (1.0 - Math.cos(robotDeltas.get(2, 0))) / robotDeltas.get(2, 0));
transformation.set(1, 1, Math.sin(robotDeltas.get(2, 0)) / robotDeltas.get(2, 0));
transformation.set(2, 2, 1.0);
}
globalDeltas = Matrix.multiply(Matrix.multiply(prevRotationMatrix, transformation), robotDeltas);
displacementPose.add(new Pose(globalDeltas.get(0, 0), globalDeltas.get(1, 0), globalDeltas.get(2, 0)));
currentVelocity = new Pose(globalDeltas.get(0, 0) / (deltaTimeNano / Math.pow(10.0, 9)), globalDeltas.get(1, 0) / (deltaTimeNano / Math.pow(10.0, 9)), globalDeltas.get(2, 0) / (deltaTimeNano / Math.pow(10.0, 9))); // deltaTimeNano is in nanoseconds; dividing by 1e9 converts it to seconds
totalHeading += globalDeltas.get(2, 0);
}
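/*
 * Informal sketch of the pose exponential used above: with h = heading at the start of this
 * update and (dx, dy, dtheta) = the robot-relative deltas from getRobotDeltas(), the
 * field-relative change is
 *   globalDeltas = R(h) * T(dtheta) * [dx, dy, dtheta]^T
 * where R(h) is the usual 2D rotation matrix padded to 3x3 and T(dtheta) has
 *   sin(dtheta)/dtheta, (cos(dtheta)-1)/dtheta, (1-cos(dtheta))/dtheta, sin(dtheta)/dtheta
 * in its upper-left 2x2 block. The small-angle branch swaps in the Taylor expansions
 * (1 - dtheta^2/6 and dtheta/2) so nothing divides by a near-zero dtheta.
 */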
/**
* This updates the Encoders.
*/
public void updateEncoders() {
leftEncoder.update();
rightEncoder.update();
strafeEncoder.update();
}
/**
* This resets the Encoders.
*/
public void resetEncoders() {
leftEncoder.reset();
rightEncoder.reset();
strafeEncoder.reset();
}
/**
* This calculates the change in position from the perspective of the robot using information
* from the Encoders.
*
* @return returns a Matrix containing the robot relative movement.
*/
public Matrix getRobotDeltas() {
Matrix returnMatrix = new Matrix(3,1);
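// Rough intuition for the formulas below (assuming the parallel pods' y-values in their encoder
// poses are signed offsets from the robot center, and the strafe pod's x-value is its forward offset):
//   turn    = (right - left) / track width, i.e. the difference of the parallel pods,
//   forward = offset-weighted average of the two parallel pods,
//   strafe  = perpendicular pod reading minus the part of it caused purely by turning.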
// x/forward movement
returnMatrix.set(0,0, FORWARD_TICKS_TO_INCHES * ((rightEncoder.getDeltaPosition() * leftEncoderPose.getY() - leftEncoder.getDeltaPosition() * rightEncoderPose.getY()) / (leftEncoderPose.getY() - rightEncoderPose.getY())));
//y/strafe movement
returnMatrix.set(1,0, STRAFE_TICKS_TO_INCHES * (strafeEncoder.getDeltaPosition() - strafeEncoderPose.getX() * ((rightEncoder.getDeltaPosition() - leftEncoder.getDeltaPosition()) / (leftEncoderPose.getY() - rightEncoderPose.getY()))));
// theta/turning
returnMatrix.set(2,0, TURN_TICKS_TO_RADIANS * ((rightEncoder.getDeltaPosition() - leftEncoder.getDeltaPosition()) / (leftEncoderPose.getY() - rightEncoderPose.getY())));
return returnMatrix;
}
/**
* This returns how far the robot has turned in radians, in a number not clamped between 0 and
* 2 * pi radians. This is used by some of the tuning OpModes and not by the Follower itself.
*
* @return returns how far the robot has turned in total, in radians.
*/
public double getTotalHeading() {
return totalHeading;
}
/**
* This returns the multiplier applied to forward movement measurement to convert from encoder
* ticks to inches. This is found empirically through a tuner.
*
* @return returns the forward ticks to inches multiplier
*/
public double getForwardMultiplier() {
return FORWARD_TICKS_TO_INCHES;
}
/**
* This returns the multiplier applied to lateral/strafe movement measurement to convert from
* encoder ticks to inches. This is found empirically through a tuner.
*
* @return returns the lateral/strafe ticks to inches multiplier
*/
public double getLateralMultiplier() {
return STRAFE_TICKS_TO_INCHES;
}
/**
* This returns the multiplier applied to turning movement measurement to convert from encoder
* ticks to radians. This is found empirically through a tuner.
*
* @return returns the turning ticks to radians multiplier
*/
public double getTurningMultiplier() {
return TURN_TICKS_TO_RADIANS;
}
/**
* This does nothing since this localizer does not use the IMU.
*/
public void resetIMU() {
}
}
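
A minimal usage sketch of the localizer above, assuming it is polled directly from an OpMode (the OpMode name is hypothetical, the start pose is arbitrary, and in this project the localizer is normally consumed through the pose updater and follower rather than used standalone):

package org.firstinspires.ftc.teamcode;

import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;

import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.localizers.ThreeWheelLocalizer;

@TeleOp(name = "ThreeWheelLocalizerSketch")
public class ThreeWheelLocalizerSketch extends OpMode {
    private ThreeWheelLocalizer localizer;

    @Override
    public void init() {
        // Arbitrary start pose for illustration; units are inches and radians.
        localizer = new ThreeWheelLocalizer(hardwareMap, new Pose(0, 0, 0));
    }

    @Override
    public void loop() {
        localizer.update();              // integrate encoder deltas since the last call
        Pose pose = localizer.getPose(); // start pose plus accumulated displacement
        telemetry.addData("x", pose.getX());
        telemetry.addData("y", pose.getY());
        telemetry.addData("heading", pose.getHeading());
        telemetry.update();
    }
}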

View File

@ -1,302 +0,0 @@
package org.firstinspires.ftc.teamcode.pedroPathing.localization.localizers;
import static org.firstinspires.ftc.teamcode.PedroConstants.BACK_ENCODER;
import static org.firstinspires.ftc.teamcode.PedroConstants.LEFT_ENCODER;
import com.acmerobotics.dashboard.config.Config;
import com.qualcomm.hardware.rev.RevHubOrientationOnRobot;
import com.qualcomm.robotcore.hardware.DcMotorEx;
import com.qualcomm.robotcore.hardware.HardwareMap;
import com.qualcomm.robotcore.hardware.IMU;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Encoder;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Localizer;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Matrix;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.MathFunctions;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector;
import org.firstinspires.ftc.teamcode.pedroPathing.util.NanoTimer;
/**
* This is the TwoWheelLocalizer class. This class extends the Localizer superclass and is a
* localizer that uses the two-wheel odometry with IMU setup. The diagram below, which is modified from
* Road Runner, shows a typical setup.
*
* The view is from the top of the robot looking downwards.
*
* left on robot is the y positive direction
*
* forward on robot is the x positive direction
*
* /--------------\
* | ____ |
* | ---- |
* | || || |
* | || || | ----> left (y positive)
* | |
* | |
* \--------------/
* |
* |
* V
* forward (x positive)
*
* @author Anyi Lin - 10158 Scott's Bots
* @version 1.0, 4/2/2024
*/
@Config
public class TwoWheelLocalizer extends Localizer { // todo: make two wheel odo work
private HardwareMap hardwareMap;
private IMU imu;
private Pose startPose;
private Pose displacementPose;
private Pose currentVelocity;
private Matrix prevRotationMatrix;
private NanoTimer timer;
private long deltaTimeNano;
private Encoder forwardEncoder;
private Encoder strafeEncoder;
private Pose forwardEncoderPose;
private Pose strafeEncoderPose;
private double previousIMUOrientation;
private double deltaRadians;
private double totalHeading;
public static double FORWARD_TICKS_TO_INCHES = 8192 * 1.37795 * 2 * Math.PI * 0.5008239963;
public static double STRAFE_TICKS_TO_INCHES = 8192 * 1.37795 * 2 * Math.PI * 0.5018874659;
/**
* This creates a new TwoWheelLocalizer from a HardwareMap, with a starting Pose at (0,0)
* facing 0 heading.
*
* @param map the HardwareMap
*/
public TwoWheelLocalizer(HardwareMap map) {
this(map, new Pose());
}
/**
* This creates a new TwoWheelLocalizer from a HardwareMap and a Pose, with the Pose
* specifying the starting pose of the localizer.
*
* @param map the HardwareMap
* @param setStartPose the Pose to start from
*/
public TwoWheelLocalizer(HardwareMap map, Pose setStartPose) {
// TODO: replace these with your encoder positions
forwardEncoderPose = new Pose(-18.5/25.4 - 0.1, 164.4/25.4, 0);
strafeEncoderPose = new Pose(-107.9/25.4+0.25, -1.1/25.4-0.23, Math.toRadians(90));
hardwareMap = map;
imu = hardwareMap.get(IMU.class, "imu");
// TODO: replace this with your IMU's orientation
imu.initialize(new IMU.Parameters(new RevHubOrientationOnRobot(RevHubOrientationOnRobot.LogoFacingDirection.UP, RevHubOrientationOnRobot.UsbFacingDirection.LEFT)));
// TODO: replace these with your encoder ports
forwardEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, LEFT_ENCODER));
strafeEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, BACK_ENCODER));
// TODO: reverse any encoders necessary
forwardEncoder.setDirection(Encoder.REVERSE);
strafeEncoder.setDirection(Encoder.FORWARD);
setStartPose(setStartPose);
timer = new NanoTimer();
deltaTimeNano = 1;
displacementPose = new Pose();
currentVelocity = new Pose();
previousIMUOrientation = MathFunctions.normalizeAngle(imu.getRobotYawPitchRollAngles().getYaw(AngleUnit.RADIANS));
deltaRadians = 0;
}
/**
* This returns the current pose estimate.
*
* @return returns the current pose estimate as a Pose
*/
@Override
public Pose getPose() {
return MathFunctions.addPoses(startPose, displacementPose);
}
/**
* This returns the current velocity estimate.
*
* @return returns the current velocity estimate as a Pose
*/
@Override
public Pose getVelocity() {
return currentVelocity.copy();
}
/**
* This returns the current velocity estimate.
*
* @return returns the current velocity estimate as a Vector
*/
@Override
public Vector getVelocityVector() {
return currentVelocity.getVector();
}
/**
* This sets the start pose. Changing the start pose should move the robot as if all its
* previous movements were displacing it from its new start pose.
*
* @param setStart the new start pose
*/
@Override
public void setStartPose(Pose setStart) {
startPose = setStart;
}
/**
* This sets the Matrix that contains the previous pose's heading rotation.
*
* @param heading the rotation of the Matrix
*/
public void setPrevRotationMatrix(double heading) {
prevRotationMatrix = new Matrix(3,3);
prevRotationMatrix.set(0, 0, Math.cos(heading));
prevRotationMatrix.set(0, 1, -Math.sin(heading));
prevRotationMatrix.set(1, 0, Math.sin(heading));
prevRotationMatrix.set(1, 1, Math.cos(heading));
prevRotationMatrix.set(2, 2, 1.0);
}
/**
* This sets the current pose estimate. Changing this should only change the robot's current
* pose estimate; it does not affect the start pose.
*
* @param setPose the new current pose estimate
*/
@Override
public void setPose(Pose setPose) {
displacementPose = MathFunctions.subtractPoses(setPose, startPose);
resetEncoders();
}
/**
* This updates the elapsed time timer that keeps track of time between updates, as well as the
* change in position of the Encoders and the IMU readings. Then, the robot's global change in
* position is calculated using the pose exponential method.
*/
@Override
public void update() {
deltaTimeNano = timer.getElapsedTime();
timer.resetTimer();
updateEncoders();
Matrix robotDeltas = getRobotDeltas();
Matrix globalDeltas;
setPrevRotationMatrix(getPose().getHeading());
Matrix transformation = new Matrix(3,3);
if (Math.abs(robotDeltas.get(2, 0)) < 0.001) {
transformation.set(0, 0, 1.0 - (Math.pow(robotDeltas.get(2, 0), 2) / 6.0));
transformation.set(0, 1, -robotDeltas.get(2, 0) / 2.0);
transformation.set(1, 0, robotDeltas.get(2, 0) / 2.0);
transformation.set(1, 1, 1.0 - (Math.pow(robotDeltas.get(2, 0), 2) / 6.0));
transformation.set(2, 2, 1.0);
} else {
transformation.set(0, 0, Math.sin(robotDeltas.get(2, 0)) / robotDeltas.get(2, 0));
transformation.set(0, 1, (Math.cos(robotDeltas.get(2, 0)) - 1.0) / robotDeltas.get(2, 0));
transformation.set(1, 0, (1.0 - Math.cos(robotDeltas.get(2, 0))) / robotDeltas.get(2, 0));
transformation.set(1, 1, Math.sin(robotDeltas.get(2, 0)) / robotDeltas.get(2, 0));
transformation.set(2, 2, 1.0);
}
globalDeltas = Matrix.multiply(Matrix.multiply(prevRotationMatrix, transformation), robotDeltas);
displacementPose.add(new Pose(globalDeltas.get(0, 0), globalDeltas.get(1, 0), globalDeltas.get(2, 0)));
currentVelocity = new Pose(globalDeltas.get(0, 0) / (deltaTimeNano / Math.pow(10.0, 9)), globalDeltas.get(1, 0) / (deltaTimeNano / Math.pow(10.0, 9)), globalDeltas.get(2, 0) / (deltaTimeNano / Math.pow(10.0, 9))); // deltaTimeNano is in nanoseconds; dividing by 1e9 converts it to seconds
totalHeading += globalDeltas.get(2, 0);
}
/**
* This updates the Encoders as well as the IMU.
*/
public void updateEncoders() {
forwardEncoder.update();
strafeEncoder.update();
double currentIMUOrientation = MathFunctions.normalizeAngle(imu.getRobotYawPitchRollAngles().getYaw(AngleUnit.RADIANS));
deltaRadians = MathFunctions.getTurnDirection(previousIMUOrientation, currentIMUOrientation) * MathFunctions.getSmallestAngleDifference(currentIMUOrientation, previousIMUOrientation);
previousIMUOrientation = currentIMUOrientation;
}
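/*
 * Wrap-around sketch: if the previous yaw was +3.10 rad and the new yaw is -3.10 rad, the robot
 * actually turned about 0.08 rad the short way across +/-pi, not -6.20 rad. The combination of
 * getTurnDirection(...) and getSmallestAngleDifference(...) above is what produces that
 * short-way, signed delta.
 */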
/**
* This resets the Encoders.
*/
public void resetEncoders() {
forwardEncoder.reset();
strafeEncoder.reset();
}
/**
* This calculates the change in position from the perspective of the robot using information
* from the Encoders and IMU.
*
* @return returns a Matrix containing the robot relative movement.
*/
public Matrix getRobotDeltas() {
Matrix returnMatrix = new Matrix(3,1);
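// Intuition for the formulas below: each dead wheel also spins when the robot rotates, by roughly
// its offset from the robot center times the heading change. Subtracting (encoder pose offset *
// deltaRadians) removes that rotational component, leaving the translational part; heading itself
// comes straight from the IMU via deltaRadians.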
// x/forward movement
returnMatrix.set(0,0, FORWARD_TICKS_TO_INCHES * (forwardEncoder.getDeltaPosition() - forwardEncoderPose.getY() * deltaRadians));
//y/strafe movement
returnMatrix.set(1,0, STRAFE_TICKS_TO_INCHES * (strafeEncoder.getDeltaPosition() - strafeEncoderPose.getX() * deltaRadians));
// theta/turning
returnMatrix.set(2,0, deltaRadians);
return returnMatrix;
}
/**
* This returns how far the robot has turned in radians, in a number not clamped between 0 and
* 2 * pi radians. This is used by some of the tuning OpModes and not by the Follower itself.
*
* @return returns how far the robot has turned in total, in radians.
*/
public double getTotalHeading() {
return totalHeading;
}
/**
* This returns the multiplier applied to forward movement measurement to convert from encoder
* ticks to inches. This is found empirically through a tuner.
*
* @return returns the forward ticks to inches multiplier
*/
public double getForwardMultiplier() {
return FORWARD_TICKS_TO_INCHES;
}
/**
* This returns the multiplier applied to lateral/strafe movement measurement to convert from
* encoder ticks to inches. This is found empirically through a tuner.
*
* @return returns the lateral/strafe ticks to inches multiplier
*/
public double getLateralMultiplier() {
return STRAFE_TICKS_TO_INCHES;
}
/**
* This returns the multiplier applied to turning movement. Since this localizer takes its heading
* directly from the IMU rather than from encoder ticks, the multiplier is simply 1.
*
* @return returns the turning ticks to radians multiplier
*/
public double getTurningMultiplier() {
return 1;
}
/**
* This resets the IMU.
*/
public void resetIMU() {
imu.resetYaw();
}
}

View File

@ -63,6 +63,7 @@ public class LateralTuner extends OpMode {
telemetryA.addData("distance moved", poseUpdater.getPose().getY());
telemetryA.addLine("The multiplier will display what your strafe ticks to inches should be to scale your current distance to " + DISTANCE + " inches.");
telemetryA.addData("multiplier", DISTANCE / (poseUpdater.getPose().getY() / poseUpdater.getLocalizer().getLateralMultiplier()));
telemetryA.update();
Drawing.drawPoseHistory(dashboardPoseTracker, "#4CAF50");
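For example, if DISTANCE is 48 inches and the localizer reports 40 inches for the same push, the displayed multiplier works out to 48/40 = 1.2 times the current lateral multiplier, and that displayed value is what gets copied back into STRAFE_TICKS_TO_INCHES.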

View File

@ -58,9 +58,6 @@ public class LocalizationTest extends OpMode {
rightRear = hardwareMap.get(DcMotorEx.class, rightRearMotorName);
rightFront = hardwareMap.get(DcMotorEx.class, rightFrontMotorName);
leftFront.setDirection(DcMotorSimple.Direction.REVERSE);
leftRear.setDirection(DcMotorSimple.Direction.REVERSE);
motors = Arrays.asList(leftFront, leftRear, rightFront, rightRear);
for (DcMotorEx motor : motors) {

View File

@ -202,9 +202,9 @@ public class FollowerConstants {
// Secondary drive PIDF coefficients
public static CustomFilteredPIDFCoefficients secondaryDrivePIDFCoefficients = new CustomFilteredPIDFCoefficients(
0.02,
0.00315,
0,
0.000005,
0.0001,
0.6,
0);

View File

@ -119,6 +119,13 @@ public class LateralZeroPowerAccelerationTuner extends OpMode {
*/
@Override
public void loop() {
telemetry.addData("x",poseUpdater.getPose().getX());
telemetry.addData("y",poseUpdater.getPose().getY());
telemetry.addData("heading",poseUpdater.getPose().getHeading());
telemetry.addData("velo mag", poseUpdater.getVelocity().getMagnitude());
telemetry.addData("velo theta", poseUpdater.getVelocity().getTheta());
if (gamepad1.cross || gamepad1.a) {
requestOpModeStop();
}

View File

@ -6,14 +6,14 @@ repositories {
}
dependencies {
implementation 'org.firstinspires.ftc:Inspection:10.0.0'
implementation 'org.firstinspires.ftc:Blocks:10.0.0'
implementation 'org.firstinspires.ftc:RobotCore:10.0.0'
implementation 'org.firstinspires.ftc:RobotServer:10.0.0'
implementation 'org.firstinspires.ftc:OnBotJava:10.0.0'
implementation 'org.firstinspires.ftc:Hardware:10.0.0'
implementation 'org.firstinspires.ftc:FtcCommon:10.0.0'
implementation 'org.firstinspires.ftc:Vision:10.0.0'
implementation 'org.firstinspires.ftc:Inspection:10.1.0'
implementation 'org.firstinspires.ftc:Blocks:10.1.0'
implementation 'org.firstinspires.ftc:RobotCore:10.1.0'
implementation 'org.firstinspires.ftc:RobotServer:10.1.0'
implementation 'org.firstinspires.ftc:OnBotJava:10.1.0'
implementation 'org.firstinspires.ftc:Hardware:10.1.0'
implementation 'org.firstinspires.ftc:FtcCommon:10.1.0'
implementation 'org.firstinspires.ftc:Vision:10.1.0'
implementation 'androidx.appcompat:appcompat:1.2.0'
implementation 'com.acmerobotics.dashboard:dashboard:0.4.5'