Removing unnecessary files for the current drive-encoder-based iteration of this robot

2024-10-22 22:44:24 -07:00
parent cd10d3a01b
commit 7a028835f8
4 changed files with 0 additions and 1087 deletions


@@ -1,171 +0,0 @@
/* Copyright (c) 2022 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.teamcode;
import static org.firstinspires.ftc.teamcode.PedroConstants.BACK_ENCODER;
import static org.firstinspires.ftc.teamcode.PedroConstants.BACK_ENCODER_DIRECTION;
import static org.firstinspires.ftc.teamcode.PedroConstants.IMU_LOGO_FACING_DIRECTION;
import static org.firstinspires.ftc.teamcode.PedroConstants.IMU_USB_FACING_DIRECTION;
import static org.firstinspires.ftc.teamcode.PedroConstants.LEFT_ENCODER;
import static org.firstinspires.ftc.teamcode.PedroConstants.LEFT_ENCODER_DIRECTION;
import static org.firstinspires.ftc.teamcode.PedroConstants.RIGHT_ENCODER;
import static org.firstinspires.ftc.teamcode.PedroConstants.RIGHT_ENCODER_DIRECTION;
import com.qualcomm.hardware.rev.RevHubOrientationOnRobot;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.DcMotorEx;
import com.qualcomm.robotcore.hardware.IMU;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.robotcore.external.navigation.AngularVelocity;
import org.firstinspires.ftc.robotcore.external.navigation.YawPitchRollAngles;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Encoder;
/*
* This OpMode shows how to use the new universal IMU interface. This
* interface may be used with the BNO055 IMU or the BHI260 IMU. It assumes that an IMU is configured
* on the robot with the name "imu".
*
* The sample will display the current Yaw, Pitch and Roll of the robot.<br>
* With the correct orientation parameters selected, pitch/roll/yaw should act as follows:
* Pitch value should INCREASE as the robot is tipped UP at the front. (Rotation about X) <br>
* Roll value should INCREASE as the robot is tipped UP at the left side. (Rotation about Y) <br>
* Yaw value should INCREASE as the robot is rotated Counter Clockwise. (Rotation about Z) <br>
*
* The yaw can be reset (to zero) by pressing the Y button on the gamepad (Triangle on a PS4 controller)
*
* This specific sample assumes that the Hub is mounted on one of the three orthogonal planes
* (X/Y, X/Z or Y/Z) and that the Hub has only been rotated in a range of 90 degree increments.
*
* Note: if your Hub is mounted on a surface angled at some non-90-degree multiple (like 30 degrees), look at
* the alternative SensorIMUNonOrthogonal sample in this folder.
*
* This "Orthogonal" requirement means that:
*
* 1) The Logo printed on the top of the Hub can ONLY be pointing in one of six directions:
* FORWARD, BACKWARD, UP, DOWN, LEFT and RIGHT.
*
* 2) The USB ports can only be pointing in one of the same six directions:<br>
* FORWARD, BACKWARD, UP, DOWN, LEFT and RIGHT.
*
* So, to fully define how your Hub is mounted to the robot, you must simply specify:<br>
* logoFacingDirection<br>
* usbFacingDirection
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
*
* Finally, choose the two correct parameters to define how your Hub is mounted and edit this OpMode
* to use those parameters.
*/
@TeleOp(name = "Sensor: IMU Orthogonal", group = "Sensor")
@Disabled // Comment this out to add to the OpMode list
public class SensorIMUOrthogonal extends LinearOpMode {
// The IMU sensor object
IMU imu;
private Encoder leftEncoder;
private Encoder rightEncoder;
private Encoder strafeEncoder;
//----------------------------------------------------------------------------------------------
// Main logic
//----------------------------------------------------------------------------------------------
@Override
public void runOpMode() throws InterruptedException {
// Retrieve and initialize the IMU.
// This sample expects the IMU to be in a REV Hub, configured under the name given by PedroConstants.IMU.
imu = hardwareMap.get(IMU.class, PedroConstants.IMU);
// TODO: replace these with your encoder ports
leftEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, LEFT_ENCODER));
rightEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, RIGHT_ENCODER));
strafeEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, BACK_ENCODER));
// TODO: reverse any encoders necessary
leftEncoder.setDirection(LEFT_ENCODER_DIRECTION);
rightEncoder.setDirection(RIGHT_ENCODER_DIRECTION);
strafeEncoder.setDirection(BACK_ENCODER_DIRECTION);
/* Define how the hub is mounted on the robot to get the correct Yaw, Pitch and Roll values.
*
* Two input parameters are required to fully specify the Orientation.
* The first parameter specifies the direction the printed logo on the Hub is pointing.
* The second parameter specifies the direction the USB connector on the Hub is pointing.
* All directions are relative to the robot, and left/right is as-viewed from behind the robot.
*
* If you are using a REV 9-Axis IMU, you can use the Rev9AxisImuOrientationOnRobot class instead of the
* RevHubOrientationOnRobot class, which has an I2cPortFacingDirection instead of a UsbFacingDirection.
*/
/* The next two lines define Hub orientation.
* The Default Orientation (shown) is when a hub is mounted horizontally with the printed logo pointing UP and the USB port pointing FORWARD.
*
* To Do: EDIT these two lines to match YOUR mounting configuration.
*/
RevHubOrientationOnRobot.LogoFacingDirection logoDirection = IMU_LOGO_FACING_DIRECTION;
RevHubOrientationOnRobot.UsbFacingDirection usbDirection = IMU_USB_FACING_DIRECTION;
RevHubOrientationOnRobot orientationOnRobot = new RevHubOrientationOnRobot(logoDirection, usbDirection);
// Now initialize the IMU with this mounting orientation
// Note: if you choose two conflicting directions, this initialization will cause a code exception.
imu.initialize(new IMU.Parameters(orientationOnRobot));
// Loop and update the dashboard
while (!isStopRequested()) {
telemetry.addData("Hub orientation", "Logo=%s USB=%s\n ", logoDirection, usbDirection);
// Check to see if heading reset is requested
if (gamepad1.y) {
telemetry.addData("Yaw", "Resetting\n");
imu.resetYaw();
} else {
telemetry.addData("Yaw", "Press Y (triangle) on Gamepad to reset\n");
}
// Retrieve Rotational Angles and Velocities
YawPitchRollAngles orientation = imu.getRobotYawPitchRollAngles();
AngularVelocity angularVelocity = imu.getRobotAngularVelocity(AngleUnit.DEGREES);
telemetry.addData("Yaw (Z)", "%.2f Deg. (Heading)", orientation.getYaw(AngleUnit.DEGREES));
telemetry.addData("Pitch (X)", "%.2f Deg.", orientation.getPitch(AngleUnit.DEGREES));
telemetry.addData("Roll (Y)", "%.2f Deg.\n", orientation.getRoll(AngleUnit.DEGREES));
telemetry.addData("Yaw (Z) velocity", "%.2f Deg/Sec", angularVelocity.zRotationRate);
telemetry.addData("Pitch (X) velocity", "%.2f Deg/Sec", angularVelocity.xRotationRate);
telemetry.addData("Roll (Y) velocity", "%.2f Deg/Sec", angularVelocity.yRotationRate);
telemetry.update();
}
}
}
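
Note: every file removed in this commit reads its hardware names, encoder directions, and IMU mounting directions from a PedroConstants class that is not part of this diff. For reference only, a minimal sketch of what such a constants file could look like is shown below; all names and values here are placeholders, not the team's actual configuration.

package org.firstinspires.ftc.teamcode;

import com.qualcomm.hardware.rev.RevHubOrientationOnRobot;

import org.firstinspires.ftc.teamcode.pedroPathing.localization.Encoder;

// Hypothetical sketch of the PedroConstants fields referenced by the deleted files; every value is a placeholder.
public class PedroConstants {
    // Hardware map names (placeholders)
    public static final String IMU = "imu";
    public static final String LEFT_ENCODER = "leftRear";
    public static final String RIGHT_ENCODER = "rightFront";
    public static final String BACK_ENCODER = "rightRear";

    // Encoder directions, mirroring Encoder.FORWARD / Encoder.REVERSE as used in the deleted localizers (placeholders)
    public static final double LEFT_ENCODER_DIRECTION = Encoder.REVERSE;
    public static final double RIGHT_ENCODER_DIRECTION = Encoder.FORWARD;
    public static final double BACK_ENCODER_DIRECTION = Encoder.FORWARD;

    // How the hub is mounted on the robot (placeholders)
    public static final RevHubOrientationOnRobot.LogoFacingDirection IMU_LOGO_FACING_DIRECTION =
            RevHubOrientationOnRobot.LogoFacingDirection.UP;
    public static final RevHubOrientationOnRobot.UsbFacingDirection IMU_USB_FACING_DIRECTION =
            RevHubOrientationOnRobot.UsbFacingDirection.FORWARD;
}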


@@ -1,317 +0,0 @@
package org.firstinspires.ftc.teamcode.pedroPathing.localization.localizers;
import static org.firstinspires.ftc.teamcode.PedroConstants.*;
import com.acmerobotics.dashboard.config.Config;
import com.qualcomm.hardware.rev.RevHubOrientationOnRobot;
import com.qualcomm.robotcore.hardware.DcMotorEx;
import com.qualcomm.robotcore.hardware.HardwareMap;
import com.qualcomm.robotcore.hardware.IMU;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Encoder;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Localizer;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Matrix;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.MathFunctions;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector;
import org.firstinspires.ftc.teamcode.pedroPathing.util.NanoTimer;
/**
* This is the ThreeWheelIMULocalizer class. This class extends the Localizer superclass and is a
* localizer that uses the three wheel odometry set up with the IMU to have more accurate heading
* readings. The diagram below, which is modified from Road Runner, shows a typical set up.
*
* The view is from the top of the robot looking downwards.
*
* left on robot is the y positive direction
*
* forward on robot is the x positive direction
*
*    /--------------\
*    |     ____     |
*    |     ----     |
*    | ||        || |
*    | ||        || |  ----> left (y positive)
*    |              |
*    |              |
*    \--------------/
*           |
*           |
*           V
*    forward (x positive)
*
* @author Logan Nash
* @author Anyi Lin - 10158 Scott's Bots
* @version 1.0, 7/9/2024
*/
@Config
public class ThreeWheelIMULocalizer extends Localizer {
private HardwareMap hardwareMap;
private Pose startPose;
private Pose displacementPose;
private Pose currentVelocity;
private Matrix prevRotationMatrix;
private NanoTimer timer;
private long deltaTimeNano;
private Encoder leftEncoder;
private Encoder rightEncoder;
private Encoder strafeEncoder;
private Pose leftEncoderPose;
private Pose rightEncoderPose;
private Pose strafeEncoderPose;
public final IMU imu;
private double previousIMUOrientation;
private double deltaRadians;
private double totalHeading;
public static double FORWARD_TICKS_TO_INCHES = 0.004;//8192 * 1.37795 * 2 * Math.PI * 0.5008239963;
public static double STRAFE_TICKS_TO_INCHES = -0.0036;//8192 * 1.37795 * 2 * Math.PI * 0.5018874659;
public static double TURN_TICKS_TO_RADIANS = 0.0043;//8192 * 1.37795 * 2 * Math.PI * 0.5;
public static boolean useIMU = true;
/**
* This creates a new ThreeWheelIMULocalizer from a HardwareMap, with a starting Pose at (0,0)
* facing 0 heading.
*
* @param map the HardwareMap
*/
public ThreeWheelIMULocalizer(HardwareMap map) {
this(map, new Pose());
}
/**
* This creates a new ThreeWheelIMULocalizer from a HardwareMap and a Pose, with the Pose
* specifying the starting pose of the localizer.
*
* @param map the HardwareMap
* @param setStartPose the Pose to start from
*/
public ThreeWheelIMULocalizer(HardwareMap map, Pose setStartPose) {
hardwareMap = map;
imu = hardwareMap.get(IMU.class, IMU);
// TODO: replace this with your IMU's orientation
imu.initialize(new IMU.Parameters(new RevHubOrientationOnRobot(IMU_LOGO_FACING_DIRECTION, IMU_USB_FACING_DIRECTION)));
// TODO: replace these with your encoder positions
leftEncoderPose = new Pose(-7.625, 6.19375, 0);
rightEncoderPose = new Pose(-7.625, -6.19375, 0);
strafeEncoderPose = new Pose(7, 1, Math.toRadians(90));
// TODO: replace these with your encoder ports
leftEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, LEFT_ENCODER));
rightEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, RIGHT_ENCODER));
strafeEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, BACK_ENCODER));
// TODO: reverse any encoders necessary
leftEncoder.setDirection(LEFT_ENCODER_DIRECTION);
rightEncoder.setDirection(RIGHT_ENCODER_DIRECTION);
strafeEncoder.setDirection(BACK_ENCODER_DIRECTION);
setStartPose(setStartPose);
timer = new NanoTimer();
deltaTimeNano = 1;
displacementPose = new Pose();
currentVelocity = new Pose();
totalHeading = 0;
resetEncoders();
}
/**
* This returns the current pose estimate.
*
* @return returns the current pose estimate as a Pose
*/
@Override
public Pose getPose() {
return MathFunctions.addPoses(startPose, displacementPose);
}
/**
* This returns the current velocity estimate.
*
* @return returns the current velocity estimate as a Pose
*/
@Override
public Pose getVelocity() {
return currentVelocity.copy();
}
/**
* This returns the current velocity estimate.
*
* @return returns the current velocity estimate as a Vector
*/
@Override
public Vector getVelocityVector() {
return currentVelocity.getVector();
}
/**
* This sets the start pose. Changing the start pose should move the robot as if all its
* previous movements were displacing it from its new start pose.
*
* @param setStart the new start pose
*/
@Override
public void setStartPose(Pose setStart) {
startPose = setStart;
}
/**
* This sets the Matrix that contains the previous pose's heading rotation.
*
* @param heading the rotation of the Matrix
*/
public void setPrevRotationMatrix(double heading) {
prevRotationMatrix = new Matrix(3,3);
prevRotationMatrix.set(0, 0, Math.cos(heading));
prevRotationMatrix.set(0, 1, -Math.sin(heading));
prevRotationMatrix.set(1, 0, Math.sin(heading));
prevRotationMatrix.set(1, 1, Math.cos(heading));
prevRotationMatrix.set(2, 2, 1.0);
}
/**
* This sets the current pose estimate. Changing this should just change the robot's current
* pose estimate, not anything to do with the start pose.
*
* @param setPose the new current pose estimate
*/
@Override
public void setPose(Pose setPose) {
displacementPose = MathFunctions.subtractPoses(setPose, startPose);
resetEncoders();
}
/**
* This updates the elapsed time timer that keeps track of time between updates, as well as the
* change position of the Encoders. Then, the robot's global change in position is calculated
* using the pose exponential method.
*/
@Override
public void update() {
deltaTimeNano = timer.getElapsedTime();
timer.resetTimer();
updateEncoders();
Matrix robotDeltas = getRobotDeltas();
Matrix globalDeltas;
setPrevRotationMatrix(getPose().getHeading());
Matrix transformation = new Matrix(3,3);
if (Math.abs(robotDeltas.get(2, 0)) < 0.001) {
transformation.set(0, 0, 1.0 - (Math.pow(robotDeltas.get(2, 0), 2) / 6.0));
transformation.set(0, 1, -robotDeltas.get(2, 0) / 2.0);
transformation.set(1, 0, robotDeltas.get(2, 0) / 2.0);
transformation.set(1, 1, 1.0 - (Math.pow(robotDeltas.get(2, 0), 2) / 6.0));
transformation.set(2, 2, 1.0);
} else {
transformation.set(0, 0, Math.sin(robotDeltas.get(2, 0)) / robotDeltas.get(2, 0));
transformation.set(0, 1, (Math.cos(robotDeltas.get(2, 0)) - 1.0) / robotDeltas.get(2, 0));
transformation.set(1, 0, (1.0 - Math.cos(robotDeltas.get(2, 0))) / robotDeltas.get(2, 0));
transformation.set(1, 1, Math.sin(robotDeltas.get(2, 0)) / robotDeltas.get(2, 0));
transformation.set(2, 2, 1.0);
}
globalDeltas = Matrix.multiply(Matrix.multiply(prevRotationMatrix, transformation), robotDeltas);
displacementPose.add(new Pose(globalDeltas.get(0, 0), globalDeltas.get(1, 0), globalDeltas.get(2, 0)));
// Convert the elapsed time from nanoseconds to seconds so the velocity comes out in units per second.
double deltaTimeSeconds = deltaTimeNano / Math.pow(10.0, 9);
currentVelocity = new Pose(globalDeltas.get(0, 0) / deltaTimeSeconds, globalDeltas.get(1, 0) / deltaTimeSeconds, globalDeltas.get(2, 0) / deltaTimeSeconds);
totalHeading += globalDeltas.get(2, 0);
}
/**
* This updates the Encoders.
*/
public void updateEncoders() {
leftEncoder.update();
rightEncoder.update();
strafeEncoder.update();
double currentIMUOrientation = MathFunctions.normalizeAngle(imu.getRobotYawPitchRollAngles().getYaw(AngleUnit.RADIANS));
deltaRadians = MathFunctions.getTurnDirection(previousIMUOrientation, currentIMUOrientation) * MathFunctions.getSmallestAngleDifference(currentIMUOrientation, previousIMUOrientation);
previousIMUOrientation = currentIMUOrientation;
}
/**
* This resets the Encoders.
*/
public void resetEncoders() {
leftEncoder.reset();
rightEncoder.reset();
strafeEncoder.reset();
}
/**
* This calculates the change in position from the perspective of the robot using information
* from the Encoders.
*
* @return returns a Matrix containing the robot relative movement.
*/
public Matrix getRobotDeltas() {
Matrix returnMatrix = new Matrix(3,1);
// x/forward movement
returnMatrix.set(0,0, FORWARD_TICKS_TO_INCHES * ((rightEncoder.getDeltaPosition() * leftEncoderPose.getY() - leftEncoder.getDeltaPosition() * rightEncoderPose.getY()) / (leftEncoderPose.getY() - rightEncoderPose.getY())));
//y/strafe movement
returnMatrix.set(1,0, STRAFE_TICKS_TO_INCHES * (strafeEncoder.getDeltaPosition() - strafeEncoderPose.getX() * ((rightEncoder.getDeltaPosition() - leftEncoder.getDeltaPosition()) / (leftEncoderPose.getY() - rightEncoderPose.getY()))));
// theta/turning
if (MathFunctions.getSmallestAngleDifference(0, deltaRadians) > 0.00005 && useIMU) {
returnMatrix.set(2, 0, deltaRadians);
} else {
returnMatrix.set(2,0, TURN_TICKS_TO_RADIANS * ((rightEncoder.getDeltaPosition() - leftEncoder.getDeltaPosition()) / (leftEncoderPose.getY() - rightEncoderPose.getY())));
}
return returnMatrix;
}
/**
* This returns how far the robot has turned in radians, in a number not clamped between 0 and
* 2 * pi radians. This is used by some tuning procedures and is not used anywhere else in this localizer.
*
* @return returns how far the robot has turned in total, in radians.
*/
public double getTotalHeading() {
return totalHeading;
}
/**
* This returns the multiplier applied to forward movement measurement to convert from encoder
* ticks to inches. This is found empirically through a tuner.
*
* @return returns the forward ticks to inches multiplier
*/
public double getForwardMultiplier() {
return FORWARD_TICKS_TO_INCHES;
}
/**
* This returns the multiplier applied to lateral/strafe movement measurement to convert from
* encoder ticks to inches. This is found empirically through a tuner.
*
* @return returns the lateral/strafe ticks to inches multiplier
*/
public double getLateralMultiplier() {
return STRAFE_TICKS_TO_INCHES;
}
/**
* This returns the multiplier applied to turning movement measurement to convert from encoder
* ticks to radians. This is found empirically through a tuner.
*
* @return returns the turning ticks to radians multiplier
*/
public double getTurningMultiplier() {
return TURN_TICKS_TO_RADIANS;
}
/**
* This resets the IMU.
*/
public void resetIMU() {
imu.resetYaw();
}
}
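
For reference, a minimal usage sketch of the deleted localizer is shown below: it constructs the localizer directly from an OpMode and polls update()/getPose() in a loop. In a full Pedro Pathing setup the localizer would normally be driven by the pose updater/follower instead; the OpMode name and class here are purely illustrative.

package org.firstinspires.ftc.teamcode;

import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;

import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.localizers.ThreeWheelIMULocalizer;

// Illustrative sketch only; not part of the deleted files.
@TeleOp(name = "Localizer Readout (sketch)", group = "Test")
public class LocalizerReadout extends LinearOpMode {
    @Override
    public void runOpMode() throws InterruptedException {
        // Start at (0, 0) facing 0 heading.
        ThreeWheelIMULocalizer localizer = new ThreeWheelIMULocalizer(hardwareMap, new Pose(0, 0, 0));
        waitForStart();
        while (opModeIsActive()) {
            localizer.update();              // integrate encoder and IMU deltas since the last call
            Pose pose = localizer.getPose(); // start pose plus accumulated displacement
            telemetry.addData("x (in)", pose.getX());
            telemetry.addData("y (in)", pose.getY());
            telemetry.addData("heading (rad)", pose.getHeading());
            telemetry.update();
        }
    }
}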


@@ -1,297 +0,0 @@
package org.firstinspires.ftc.teamcode.pedroPathing.localization.localizers;
import static org.firstinspires.ftc.teamcode.PedroConstants.*;
import com.acmerobotics.dashboard.config.Config;
import com.qualcomm.robotcore.hardware.DcMotorEx;
import com.qualcomm.robotcore.hardware.HardwareMap;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Encoder;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Localizer;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Matrix;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.MathFunctions;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector;
import org.firstinspires.ftc.teamcode.pedroPathing.util.NanoTimer;
/**
* This is the ThreeWheelLocalizer class. This class extends the Localizer superclass and is a
* localizer that uses the three wheel odometry set up. The diagram below, which is modified from
* Road Runner, shows a typical set up.
*
* The view is from the top of the robot looking downwards.
*
* left on robot is the y positive direction
*
* forward on robot is the x positive direction
*
*    /--------------\
*    |     ____     |
*    |     ----     |
*    | ||        || |
*    | ||        || |  ----> left (y positive)
*    |              |
*    |              |
*    \--------------/
*           |
*           |
*           V
*    forward (x positive)
*
* @author Anyi Lin - 10158 Scott's Bots
* @version 1.0, 4/2/2024
*/
@Config
public class ThreeWheelLocalizer extends Localizer {
private HardwareMap hardwareMap;
private Pose startPose;
private Pose displacementPose;
private Pose currentVelocity;
private Matrix prevRotationMatrix;
private NanoTimer timer;
private long deltaTimeNano;
private Encoder leftEncoder;
private Encoder rightEncoder;
private Encoder strafeEncoder;
private Pose leftEncoderPose;
private Pose rightEncoderPose;
private Pose strafeEncoderPose;
private double totalHeading;
// public static double FORWARD_TICKS_TO_INCHES = 0.00052189;//8192 * 1.37795 * 2 * Math.PI * 0.5008239963;
public static double FORWARD_TICKS_TO_INCHES = 0.0029;//8192 * 1.37795 * 2 * Math.PI * 0.5008239963;
// public static double STRAFE_TICKS_TO_INCHES = 0.00052189;//8192 * 1.37795 * 2 * Math.PI * 0.5018874659;
public static double STRAFE_TICKS_TO_INCHES = 0.0029;//8192 * 1.37795 * 2 * Math.PI * 0.5018874659;
// public static double TURN_TICKS_TO_RADIANS = 0.00053717;//8192 * 1.37795 * 2 * Math.PI * 0.5;
public static double TURN_TICKS_TO_RADIANS = 0.003;//8192 * 1.37795 * 2 * Math.PI * 0.5;
/**
* This creates a new ThreeWheelLocalizer from a HardwareMap, with a starting Pose at (0,0)
* facing 0 heading.
*
* @param map the HardwareMap
*/
public ThreeWheelLocalizer(HardwareMap map) {
this(map, new Pose());
}
/**
* This creates a new ThreeWheelLocalizer from a HardwareMap and a Pose, with the Pose
* specifying the starting pose of the localizer.
*
* @param map the HardwareMap
* @param setStartPose the Pose to start from
*/
public ThreeWheelLocalizer(HardwareMap map, Pose setStartPose) {
// TODO: replace these with your encoder positions
leftEncoderPose = new Pose(0, 6.19375, 0);
rightEncoderPose = new Pose(0, -6.19375, 0);
strafeEncoderPose = new Pose(-7, 0, Math.toRadians(90));
hardwareMap = map;
// TODO: replace these with your encoder ports
leftEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, LEFT_ENCODER));
rightEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, RIGHT_ENCODER));
strafeEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, BACK_ENCODER));
// TODO: reverse any encoders necessary
leftEncoder.setDirection(LEFT_ENCODER_DIRECTION);
rightEncoder.setDirection(RIGHT_ENCODER_DIRECTION);
strafeEncoder.setDirection(BACK_ENCODER_DIRECTION);
setStartPose(setStartPose);
timer = new NanoTimer();
deltaTimeNano = 1;
displacementPose = new Pose();
currentVelocity = new Pose();
totalHeading = 0;
resetEncoders();
}
/**
* This returns the current pose estimate.
*
* @return returns the current pose estimate as a Pose
*/
@Override
public Pose getPose() {
return MathFunctions.addPoses(startPose, displacementPose);
}
/**
* This returns the current velocity estimate.
*
* @return returns the current velocity estimate as a Pose
*/
@Override
public Pose getVelocity() {
return currentVelocity.copy();
}
/**
* This returns the current velocity estimate.
*
* @return returns the current velocity estimate as a Vector
*/
@Override
public Vector getVelocityVector() {
return currentVelocity.getVector();
}
/**
* This sets the start pose. Changing the start pose should move the robot as if all its
* previous movements were displacing it from its new start pose.
*
* @param setStart the new start pose
*/
@Override
public void setStartPose(Pose setStart) {
startPose = setStart;
}
/**
* This sets the Matrix that contains the previous pose's heading rotation.
*
* @param heading the rotation of the Matrix
*/
public void setPrevRotationMatrix(double heading) {
prevRotationMatrix = new Matrix(3,3);
prevRotationMatrix.set(0, 0, Math.cos(heading));
prevRotationMatrix.set(0, 1, -Math.sin(heading));
prevRotationMatrix.set(1, 0, Math.sin(heading));
prevRotationMatrix.set(1, 1, Math.cos(heading));
prevRotationMatrix.set(2, 2, 1.0);
}
/**
* This sets the current pose estimate. Changing this should just change the robot's current
* pose estimate, not anything to do with the start pose.
*
* @param setPose the new current pose estimate
*/
@Override
public void setPose(Pose setPose) {
displacementPose = MathFunctions.subtractPoses(setPose, startPose);
resetEncoders();
}
/**
* This updates the elapsed time timer that keeps track of time between updates, as well as the
* change position of the Encoders. Then, the robot's global change in position is calculated
* using the pose exponential method.
*/
@Override
public void update() {
deltaTimeNano = timer.getElapsedTime();
timer.resetTimer();
updateEncoders();
Matrix robotDeltas = getRobotDeltas();
Matrix globalDeltas;
setPrevRotationMatrix(getPose().getHeading());
Matrix transformation = new Matrix(3,3);
if (Math.abs(robotDeltas.get(2, 0)) < 0.001) {
transformation.set(0, 0, 1.0 - (Math.pow(robotDeltas.get(2, 0), 2) / 6.0));
transformation.set(0, 1, -robotDeltas.get(2, 0) / 2.0);
transformation.set(1, 0, robotDeltas.get(2, 0) / 2.0);
transformation.set(1, 1, 1.0 - (Math.pow(robotDeltas.get(2, 0), 2) / 6.0));
transformation.set(2, 2, 1.0);
} else {
transformation.set(0, 0, Math.sin(robotDeltas.get(2, 0)) / robotDeltas.get(2, 0));
transformation.set(0, 1, (Math.cos(robotDeltas.get(2, 0)) - 1.0) / robotDeltas.get(2, 0));
transformation.set(1, 0, (1.0 - Math.cos(robotDeltas.get(2, 0))) / robotDeltas.get(2, 0));
transformation.set(1, 1, Math.sin(robotDeltas.get(2, 0)) / robotDeltas.get(2, 0));
transformation.set(2, 2, 1.0);
}
globalDeltas = Matrix.multiply(Matrix.multiply(prevRotationMatrix, transformation), robotDeltas);
displacementPose.add(new Pose(globalDeltas.get(0, 0), globalDeltas.get(1, 0), globalDeltas.get(2, 0)));
// Convert the elapsed time from nanoseconds to seconds so the velocity comes out in units per second.
double deltaTimeSeconds = deltaTimeNano / Math.pow(10.0, 9);
currentVelocity = new Pose(globalDeltas.get(0, 0) / deltaTimeSeconds, globalDeltas.get(1, 0) / deltaTimeSeconds, globalDeltas.get(2, 0) / deltaTimeSeconds);
totalHeading += globalDeltas.get(2, 0);
}
/**
* This updates the Encoders.
*/
public void updateEncoders() {
leftEncoder.update();
rightEncoder.update();
strafeEncoder.update();
}
/**
* This resets the Encoders.
*/
public void resetEncoders() {
leftEncoder.reset();
rightEncoder.reset();
strafeEncoder.reset();
}
/**
* This calculates the change in position from the perspective of the robot using information
* from the Encoders.
*
* @return returns a Matrix containing the robot relative movement.
*/
public Matrix getRobotDeltas() {
Matrix returnMatrix = new Matrix(3,1);
// x/forward movement
returnMatrix.set(0,0, FORWARD_TICKS_TO_INCHES * ((rightEncoder.getDeltaPosition() * leftEncoderPose.getY() - leftEncoder.getDeltaPosition() * rightEncoderPose.getY()) / (leftEncoderPose.getY() - rightEncoderPose.getY())));
//y/strafe movement
returnMatrix.set(1,0, STRAFE_TICKS_TO_INCHES * (strafeEncoder.getDeltaPosition() - strafeEncoderPose.getX() * ((rightEncoder.getDeltaPosition() - leftEncoder.getDeltaPosition()) / (leftEncoderPose.getY() - rightEncoderPose.getY()))));
// theta/turning
returnMatrix.set(2,0, TURN_TICKS_TO_RADIANS * ((rightEncoder.getDeltaPosition() - leftEncoder.getDeltaPosition()) / (leftEncoderPose.getY() - rightEncoderPose.getY())));
return returnMatrix;
}
/**
* This returns how far the robot has turned in radians, in a number not clamped between 0 and
* 2 * pi radians. This is used by some tuning procedures and is not used anywhere else in this localizer.
*
* @return returns how far the robot has turned in total, in radians.
*/
public double getTotalHeading() {
return totalHeading;
}
/**
* This returns the multiplier applied to forward movement measurement to convert from encoder
* ticks to inches. This is found empirically through a tuner.
*
* @return returns the forward ticks to inches multiplier
*/
public double getForwardMultiplier() {
return FORWARD_TICKS_TO_INCHES;
}
/**
* This returns the multiplier applied to lateral/strafe movement measurement to convert from
* encoder ticks to inches. This is found empirically through a tuner.
*
* @return returns the lateral/strafe ticks to inches multiplier
*/
public double getLateralMultiplier() {
return STRAFE_TICKS_TO_INCHES;
}
/**
* This returns the multiplier applied to turning movement measurement to convert from encoder
* ticks to radians. This is found empirically through a tuner.
*
* @return returns the turning ticks to radians multiplier
*/
public double getTurningMultiplier() {
return TURN_TICKS_TO_RADIANS;
}
/**
* This does nothing since this localizer does not use the IMU.
*/
public void resetIMU() {
}
}
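
Both three-wheel localizers above share the same update() math: robot-frame deltas from getRobotDeltas() are pushed through the pose exponential and then rotated by the previous heading. As a reference, that step can be restated as a standalone helper; this is a rewording of the code above, not part of the deleted files, and the method name is illustrative.

// Restatement of the pose-exponential step in update(): maps robot-frame deltas
// (dx, dy, dtheta) to field-frame deltas, given the heading at the start of the interval.
public static double[] poseExponential(double dx, double dy, double dtheta, double prevHeading) {
    double s, c;
    if (Math.abs(dtheta) < 0.001) {
        // Small-angle Taylor approximation, matching the first branch of update().
        s = 1.0 - (dtheta * dtheta) / 6.0;
        c = -dtheta / 2.0;
    } else {
        s = Math.sin(dtheta) / dtheta;
        c = (Math.cos(dtheta) - 1.0) / dtheta;
    }
    // Constant-curvature correction in the robot frame (the 3x3 "transformation" matrix above).
    double localX = s * dx + c * dy;
    double localY = -c * dx + s * dy;
    // Rotate into the field frame using the previous heading (the "prevRotationMatrix" above).
    double cosH = Math.cos(prevHeading);
    double sinH = Math.sin(prevHeading);
    return new double[] {
            cosH * localX - sinH * localY, // field-frame x delta
            sinH * localX + cosH * localY, // field-frame y delta
            dtheta                         // heading change is the same in both frames
    };
}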


@@ -1,302 +0,0 @@
package org.firstinspires.ftc.teamcode.pedroPathing.localization.localizers;
import static org.firstinspires.ftc.teamcode.PedroConstants.BACK_ENCODER;
import static org.firstinspires.ftc.teamcode.PedroConstants.LEFT_ENCODER;
import com.acmerobotics.dashboard.config.Config;
import com.qualcomm.hardware.rev.RevHubOrientationOnRobot;
import com.qualcomm.robotcore.hardware.DcMotorEx;
import com.qualcomm.robotcore.hardware.HardwareMap;
import com.qualcomm.robotcore.hardware.IMU;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Encoder;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Localizer;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Matrix;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.MathFunctions;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector;
import org.firstinspires.ftc.teamcode.pedroPathing.util.NanoTimer;
/**
* This is the TwoWheelLocalizer class. This class extends the Localizer superclass and is a
* localizer that uses the two wheel odometry with IMU set up. The diagram below, which is modified from
* Road Runner, shows a typical set up.
*
* The view is from the top of the robot looking downwards.
*
* left on robot is the y positive direction
*
* forward on robot is the x positive direction
*
*    /--------------\
*    |     ____     |
*    |     ----     |
*    | ||        || |
*    | ||        || |  ----> left (y positive)
*    |              |
*    |              |
*    \--------------/
*           |
*           |
*           V
*    forward (x positive)
*
* @author Anyi Lin - 10158 Scott's Bots
* @version 1.0, 4/2/2024
*/
@Config
public class TwoWheelLocalizer extends Localizer { // todo: make two wheel odo work
private HardwareMap hardwareMap;
private IMU imu;
private Pose startPose;
private Pose displacementPose;
private Pose currentVelocity;
private Matrix prevRotationMatrix;
private NanoTimer timer;
private long deltaTimeNano;
private Encoder forwardEncoder;
private Encoder strafeEncoder;
private Pose forwardEncoderPose;
private Pose strafeEncoderPose;
private double previousIMUOrientation;
private double deltaRadians;
private double totalHeading;
public static double FORWARD_TICKS_TO_INCHES = 8192 * 1.37795 * 2 * Math.PI * 0.5008239963;
public static double STRAFE_TICKS_TO_INCHES = 8192 * 1.37795 * 2 * Math.PI * 0.5018874659;
/**
* This creates a new TwoWheelLocalizer from a HardwareMap, with a starting Pose at (0,0)
* facing 0 heading.
*
* @param map the HardwareMap
*/
public TwoWheelLocalizer(HardwareMap map) {
this(map, new Pose());
}
/**
* This creates a new TwoWheelLocalizer from a HardwareMap and a Pose, with the Pose
* specifying the starting pose of the localizer.
*
* @param map the HardwareMap
* @param setStartPose the Pose to start from
*/
public TwoWheelLocalizer(HardwareMap map, Pose setStartPose) {
// TODO: replace these with your encoder positions
forwardEncoderPose = new Pose(-18.5/25.4 - 0.1, 164.4/25.4, 0);
strafeEncoderPose = new Pose(-107.9/25.4+0.25, -1.1/25.4-0.23, Math.toRadians(90));
hardwareMap = map;
imu = hardwareMap.get(IMU.class, "imu");
// TODO: replace this with your IMU's orientation
imu.initialize(new IMU.Parameters(new RevHubOrientationOnRobot(RevHubOrientationOnRobot.LogoFacingDirection.UP, RevHubOrientationOnRobot.UsbFacingDirection.LEFT)));
// TODO: replace these with your encoder ports
forwardEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, LEFT_ENCODER));
strafeEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, BACK_ENCODER));
// TODO: reverse any encoders necessary
forwardEncoder.setDirection(Encoder.REVERSE);
strafeEncoder.setDirection(Encoder.FORWARD);
setStartPose(setStartPose);
timer = new NanoTimer();
deltaTimeNano = 1;
displacementPose = new Pose();
currentVelocity = new Pose();
previousIMUOrientation = MathFunctions.normalizeAngle(imu.getRobotYawPitchRollAngles().getYaw(AngleUnit.RADIANS));
deltaRadians = 0;
}
/**
* This returns the current pose estimate.
*
* @return returns the current pose estimate as a Pose
*/
@Override
public Pose getPose() {
return MathFunctions.addPoses(startPose, displacementPose);
}
/**
* This returns the current velocity estimate.
*
* @return returns the current velocity estimate as a Pose
*/
@Override
public Pose getVelocity() {
return currentVelocity.copy();
}
/**
* This returns the current velocity estimate.
*
* @return returns the current velocity estimate as a Vector
*/
@Override
public Vector getVelocityVector() {
return currentVelocity.getVector();
}
/**
* This sets the start pose. Changing the start pose should move the robot as if all its
* previous movements were displacing it from its new start pose.
*
* @param setStart the new start pose
*/
@Override
public void setStartPose(Pose setStart) {
startPose = setStart;
}
/**
* This sets the Matrix that contains the previous pose's heading rotation.
*
* @param heading the rotation of the Matrix
*/
public void setPrevRotationMatrix(double heading) {
prevRotationMatrix = new Matrix(3,3);
prevRotationMatrix.set(0, 0, Math.cos(heading));
prevRotationMatrix.set(0, 1, -Math.sin(heading));
prevRotationMatrix.set(1, 0, Math.sin(heading));
prevRotationMatrix.set(1, 1, Math.cos(heading));
prevRotationMatrix.set(2, 2, 1.0);
}
/**
* This sets the current pose estimate. Changing this should just change the robot's current
* pose estimate, not anything to do with the start pose.
*
* @param setPose the new current pose estimate
*/
@Override
public void setPose(Pose setPose) {
displacementPose = MathFunctions.subtractPoses(setPose, startPose);
resetEncoders();
}
/**
* This updates the elapsed time timer that keeps track of time between updates, as well as the
* change position of the Encoders and the IMU readings. Then, the robot's global change in
* position is calculated using the pose exponential method.
*/
@Override
public void update() {
deltaTimeNano = timer.getElapsedTime();
timer.resetTimer();
updateEncoders();
Matrix robotDeltas = getRobotDeltas();
Matrix globalDeltas;
setPrevRotationMatrix(getPose().getHeading());
Matrix transformation = new Matrix(3,3);
if (Math.abs(robotDeltas.get(2, 0)) < 0.001) {
transformation.set(0, 0, 1.0 - (Math.pow(robotDeltas.get(2, 0), 2) / 6.0));
transformation.set(0, 1, -robotDeltas.get(2, 0) / 2.0);
transformation.set(1, 0, robotDeltas.get(2, 0) / 2.0);
transformation.set(1, 1, 1.0 - (Math.pow(robotDeltas.get(2, 0), 2) / 6.0));
transformation.set(2, 2, 1.0);
} else {
transformation.set(0, 0, Math.sin(robotDeltas.get(2, 0)) / robotDeltas.get(2, 0));
transformation.set(0, 1, (Math.cos(robotDeltas.get(2, 0)) - 1.0) / robotDeltas.get(2, 0));
transformation.set(1, 0, (1.0 - Math.cos(robotDeltas.get(2, 0))) / robotDeltas.get(2, 0));
transformation.set(1, 1, Math.sin(robotDeltas.get(2, 0)) / robotDeltas.get(2, 0));
transformation.set(2, 2, 1.0);
}
globalDeltas = Matrix.multiply(Matrix.multiply(prevRotationMatrix, transformation), robotDeltas);
displacementPose.add(new Pose(globalDeltas.get(0, 0), globalDeltas.get(1, 0), globalDeltas.get(2, 0)));
// Convert the elapsed time from nanoseconds to seconds so the velocity comes out in units per second.
double deltaTimeSeconds = deltaTimeNano / Math.pow(10.0, 9);
currentVelocity = new Pose(globalDeltas.get(0, 0) / deltaTimeSeconds, globalDeltas.get(1, 0) / deltaTimeSeconds, globalDeltas.get(2, 0) / deltaTimeSeconds);
totalHeading += globalDeltas.get(2, 0);
}
/**
* This updates the Encoders as well as the IMU.
*/
public void updateEncoders() {
forwardEncoder.update();
strafeEncoder.update();
double currentIMUOrientation = MathFunctions.normalizeAngle(imu.getRobotYawPitchRollAngles().getYaw(AngleUnit.RADIANS));
deltaRadians = MathFunctions.getTurnDirection(previousIMUOrientation, currentIMUOrientation) * MathFunctions.getSmallestAngleDifference(currentIMUOrientation, previousIMUOrientation);
previousIMUOrientation = currentIMUOrientation;
}
/**
* This resets the Encoders.
*/
public void resetEncoders() {
forwardEncoder.reset();
strafeEncoder.reset();
}
/**
* This calculates the change in position from the perspective of the robot using information
* from the Encoders and IMU.
*
* @return returns a Matrix containing the robot relative movement.
*/
public Matrix getRobotDeltas() {
Matrix returnMatrix = new Matrix(3,1);
// x/forward movement
returnMatrix.set(0,0, FORWARD_TICKS_TO_INCHES * (forwardEncoder.getDeltaPosition() - forwardEncoderPose.getY() * deltaRadians));
//y/strafe movement
returnMatrix.set(1,0, STRAFE_TICKS_TO_INCHES * (strafeEncoder.getDeltaPosition() - strafeEncoderPose.getX() * deltaRadians));
// theta/turning
returnMatrix.set(2,0, deltaRadians);
return returnMatrix;
}
/**
* This returns how far the robot has turned in radians, in a number not clamped between 0 and
* 2 * pi radians. This is used by some tuning procedures and is not used anywhere else in this localizer.
*
* @return returns how far the robot has turned in total, in radians.
*/
public double getTotalHeading() {
return totalHeading;
}
/**
* This returns the multiplier applied to forward movement measurement to convert from encoder
* ticks to inches. This is found empirically through a tuner.
*
* @return returns the forward ticks to inches multiplier
*/
public double getForwardMultiplier() {
return FORWARD_TICKS_TO_INCHES;
}
/**
* This returns the multiplier applied to lateral/strafe movement measurement to convert from
* encoder ticks to inches. This is found empirically through a tuner.
*
* @return returns the lateral/strafe ticks to inches multiplier
*/
public double getLateralMultiplier() {
return STRAFE_TICKS_TO_INCHES;
}
/**
* This returns the multiplier applied to turning movement measurement to convert from encoder
* ticks to radians. This is found empirically through a tuner.
*
* @return returns the turning ticks to radians multiplier
*/
public double getTurningMultiplier() {
return 1;
}
/**
* This resets the IMU.
*/
public void resetIMU() {
imu.resetYaw();
}
}
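
For comparison with the three-wheel versions, getRobotDeltas() in this two-wheel localizer removes the rotation-induced wheel travel (tracking-wheel offset times the IMU heading change) from each wheel reading before applying the tuned ticks-to-inches multipliers, and takes the heading delta straight from the IMU. A minimal restatement is below; the helper name and parameter names are illustrative only.

// Restatement of TwoWheelLocalizer.getRobotDeltas() above; returns robot-frame {forward, strafe, heading} deltas.
public static double[] twoWheelDeltas(double forwardTicks, double strafeTicks, double imuDeltaRadians,
                                      double forwardWheelYOffset, double strafeWheelXOffset,
                                      double forwardTicksToInches, double strafeTicksToInches) {
    // Subtract the travel each wheel sees purely because the robot turned, then convert ticks to inches.
    double dx = forwardTicksToInches * (forwardTicks - forwardWheelYOffset * imuDeltaRadians);
    double dy = strafeTicksToInches * (strafeTicks - strafeWheelXOffset * imuDeltaRadians);
    return new double[] {dx, dy, imuDeltaRadians};
}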