10 Commits

39 changed files with 279 additions and 4487 deletions

View File

@@ -5,20 +5,20 @@ import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
 import org.firstinspires.ftc.teamcode.cometbots.CometBotAutoDevelopment;
-@TeleOp(name = "CometBot Auto", group = "Development")
+@TeleOp(name = "CometBot Auto v2", group = "Development")
 public class CometBotDevAuto extends OpMode {
     public CometBotAutoDevelopment runMode;
     @Override
     public void init() {
-        this.runMode = new CometBotAutoDevelopment(hardwareMap, telemetry, gamepad1, gamepad2);
-        this.runMode.init();
+        runMode = new CometBotAutoDevelopment(hardwareMap, gamepad1, gamepad2);
+        runMode.init();
     }
     @Override
     public void loop() {
-        this.runMode.update();
+        runMode.update();
         telemetry.update();
     }
 }

View File

@@ -1,64 +0,0 @@
package org.firstinspires.ftc.teamcode;
import com.acmerobotics.roadrunner.Action;
import com.acmerobotics.roadrunner.SleepAction;
import com.acmerobotics.roadrunner.ftc.Actions;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import org.firstinspires.ftc.teamcode.pedroPathing.follower.Follower;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
import org.firstinspires.ftc.teamcode.subsystem.AutoLine1;
import org.firstinspires.ftc.teamcode.subsystem.AutoLine2;
import org.firstinspires.ftc.teamcode.subsystem.AutoLine3;
@Autonomous(name = "BlueNetAuto", group = "Dev")
public class NetAuto extends OpMode {
public Follower follower;
public AutoLine1 myFirstPath = new AutoLine1();
public AutoLine2 mySecondPath = new AutoLine2();
public int pathState = 0;
@Override
public void init() {
follower = new Follower(hardwareMap);
follower.setMaxPower(0.65);
myFirstPath.moveToAutoLine1(follower);
}
@Override
public void loop() {
follower.update();
switch(pathState) {
case 0:
if (!follower.isBusy()) {
pathState = 1;
mySecondPath.moveToAutoLine2(follower);
}
case 1:
if (!follower.isBusy()) {
System.out.println("Finished");
}
}
// switch(pathState) {
// case 0:
// if (!follower.isBusy()) {
// mySecondPath.moveToAutoLine2(follower);
// pathState = 1;
// }
// case 1:
// if (!follower.isBusy()) {
// pathState = 2;
// }
// case 2:
// // set path 3
// // as if busy, if not, set path 4 and so on.
// System.out.print("we're at the end");
//
// }
follower.telemetryDebug(telemetry);
}
}
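
Note on the deleted NetAuto OpMode above: its switch on pathState has no break statements, so case 0 falls through into case 1 within the same loop() call. A minimal sketch of the same two-path sequence with explicit breaks, using only the classes and calls already present in the deleted file (nothing else is assumed):

    // Sketch only: the deleted OpMode's path state machine with explicit breaks.
    switch (pathState) {
        case 0:
            if (!follower.isBusy()) {
                pathState = 1;
                mySecondPath.moveToAutoLine2(follower);
            }
            break; // prevents falling straight into case 1 on the same iteration
        case 1:
            if (!follower.isBusy()) {
                System.out.println("Finished");
            }
            break;
    }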

View File

@@ -39,20 +39,22 @@ public class PedroConstants {
     // Robot encoders
     // NOTE: Encoders are plugged into the same ports as motors hence the weird names
-    public static final String RIGHT_ENCODER = "back-right"; //0
+    public static final String RIGHT_ENCODER = "front-left"; //2
     public static final String BACK_ENCODER = "front-right"; //1
-    public static final String LEFT_ENCODER = "front-left"; //2
+    public static final String LEFT_ENCODER = "back-right"; //0
     // Robot encoder direction
     public static final double LEFT_ENCODER_DIRECTION = Encoder.FORWARD;
-    public static final double RIGHT_ENCODER_DIRECTION = Encoder.FORWARD;
+    public static final double RIGHT_ENCODER_DIRECTION = Encoder.REVERSE;
     public static final double BACK_ENCODER_DIRECTION = Encoder.FORWARD;
     // Arm config
-    public static final String SLIDE_MOTOR = "SlideMotor";
-    public static final String Claw_Servo = "ClawServo";
-    public static final String Wrist_Servo = "WristServo";
-    public static final String Arm_Servo = "ArmServo";
+    public static final String LIFT_SLIDE_LEFT_MOTOR = "lift-slide-left";
+    public static final String LIFT_SLIDE_RIGHT_MOTOR = "lift-slide-right";
+    public static final String CLAW_SERVO = "claw-servo";
+    public static final String WRIST_SERVO = "wrist-servo";
+    public static final String ARM_SERVO = "arm-servo";
+    public static final String THUMB_SERVO = "thumb-servo";
     /*
     Pedro's parameters
@@ -62,19 +64,19 @@ public class PedroConstants {
     public static final double ROBOT_WEIGHT_IN_KG = 9;
     // Maximum velocity of the robot going forward
-    public static final double ROBOT_SPEED_FORWARD = 51.5;
+    public static final double ROBOT_SPEED_FORWARD = 53.223;
     // Maximum velocity of the robot going right
-    public static final double ROBOT_SPEED_LATERAL = 28.7;
+    public static final double ROBOT_SPEED_LATERAL = 41.4081;
     // Rate of deceleration when power is cut-off when the robot is moving forward
-    public static final double FORWARD_ZERO_POWER_ACCEL = -59.8;
+    public static final double FORWARD_ZERO_POWER_ACCEL = -76.8421;
     // Rate of deceleration when power is cut-off when the robot is moving to the right
-    public static final double LATERAL_ZERO_POWER_ACCEL = -99.7;
+    public static final double LATERAL_ZERO_POWER_ACCEL = -93.4183;
     // Determines how fast your robot will decelerate as a factor of how fast your robot will coast to a stop
-    public static final double ZERO_POWER_ACCEL_MULT = 3.5;
+    public static final double ZERO_POWER_ACCEL_MULT = 4.0;
     /* Centripetal force correction - increase if robot is correcting into the path
                                      - decrease if robot is correcting away from the path */
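
For context on the renamed device names above: these strings must match the Robot Controller configuration, and subsystems look them up through the FTC SDK hardware map. A minimal, hypothetical sketch of that lookup (hardwareMap.get and the DcMotorEx/Servo types are standard FTC SDK; the class name and the reversal of one slide motor are assumptions, not taken from this repository):

    import com.qualcomm.robotcore.hardware.DcMotorEx;
    import com.qualcomm.robotcore.hardware.DcMotorSimple;
    import com.qualcomm.robotcore.hardware.HardwareMap;
    import com.qualcomm.robotcore.hardware.Servo;

    // Illustrative only: mapping the renamed PedroConstants names to hardware.
    class ArmHardwareSketch {
        DcMotorEx liftLeft, liftRight;
        Servo claw, wrist, arm, thumb;

        ArmHardwareSketch(HardwareMap hardwareMap) {
            // Names come straight from the constants added in this diff.
            liftLeft = hardwareMap.get(DcMotorEx.class, PedroConstants.LIFT_SLIDE_LEFT_MOTOR);
            liftRight = hardwareMap.get(DcMotorEx.class, PedroConstants.LIFT_SLIDE_RIGHT_MOTOR);
            claw = hardwareMap.get(Servo.class, PedroConstants.CLAW_SERVO);
            wrist = hardwareMap.get(Servo.class, PedroConstants.WRIST_SERVO);
            arm = hardwareMap.get(Servo.class, PedroConstants.ARM_SERVO);
            thumb = hardwareMap.get(Servo.class, PedroConstants.THUMB_SERVO);
            // Assumption: mirrored slide motors usually need one side reversed.
            liftRight.setDirection(DcMotorSimple.Direction.REVERSE);
        }
    }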

View File

@@ -1,151 +0,0 @@
package org.firstinspires.ftc.teamcode.cometbots;
import com.acmerobotics.dashboard.FtcDashboard;
import com.acmerobotics.dashboard.config.Config;
import com.acmerobotics.dashboard.telemetry.MultipleTelemetry;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import org.firstinspires.ftc.robotcore.external.Telemetry;
import org.firstinspires.ftc.teamcode.pedroPathing.follower.Follower;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierCurve;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierLine;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathChain;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Point;
/**
* This is the Circle autonomous OpMode. It runs the robot in a PathChain that's actually not quite
* a circle, but some Bezier curves that have control points set essentially in a square. However,
* it turns enough to tune your centripetal force correction and some of your heading. Some lag in
* heading is to be expected.
*
* @author Anyi Lin - 10158 Scott's Bots
* @author Aaron Yang - 10158 Scott's Bots
* @author Harrison Womack - 10158 Scott's Bots
* @version 1.0, 3/12/2024
*/
@Config
@Autonomous(name = "BlueNonBasketAuto", group = "Autonomous Pathing Tuning")
public class BlueNonBasketAuto extends OpMode {
private Telemetry telemetryA;
private Follower follower;
private PathChain path;
private final Pose startPose = new Pose(10.929, 55.446, 0);
/**
* This initializes the Follower and creates the PathChain for the "circle". Additionally, this
* initializes the FTC Dashboard telemetry.
*/
@Override
public void init() {
follower = new Follower(hardwareMap);
follower.setMaxPower(.45);
follower.setStartingPose(startPose);
path = follower.pathBuilder()
.addPath(
// Line 1
new BezierCurve(
new Point(10.929, 55.446, Point.CARTESIAN),
new Point(42.429, 46.446, Point.CARTESIAN),
new Point(36.321, 38.089, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(0))
.addPath(
// Line 2
new BezierLine(
new Point(36.321, 38.089, Point.CARTESIAN),
new Point(59.786, 36.643, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(0))
.addPath(
// Line 3
new BezierLine(
new Point(59.786, 36.643, Point.CARTESIAN),
new Point(59.304, 24.750, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(0))
.addPath(
// Line 4
new BezierLine(
new Point(59.304, 24.750, Point.CARTESIAN),
new Point(13.982, 23.946, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(0))
.addPath(
// Line 5
new BezierLine(
new Point(13.982, 23.946, Point.CARTESIAN),
new Point(59.464, 24.429, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(0))
.addPath(
// Line 6
new BezierLine(
new Point(59.464, 24.429, Point.CARTESIAN),
new Point(58.982, 15.268, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(0))
.addPath(
// Line 7
new BezierLine(
new Point(58.982, 15.268, Point.CARTESIAN),
new Point(13.821, 14.464, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(0))
.addPath(
// Line 8
new BezierLine(
new Point(13.821, 14.464, Point.CARTESIAN),
new Point(58.661, 13.500, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(0))
.addPath(
// Line 9
new BezierLine(
new Point(58.661, 13.500, Point.CARTESIAN),
new Point(58.339, 8.679, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(0))
.addPath(
// Line 10
new BezierLine(
new Point(58.339, 8.679, Point.CARTESIAN),
new Point(14.625, 8.518, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(0)).build();
follower.followPath(path, true);
telemetryA = new MultipleTelemetry(this.telemetry, FtcDashboard.getInstance().getTelemetry());
telemetryA.update();
}
/**
* This runs the OpMode, updating the Follower as well as printing out the debug statements to
* the Telemetry, as well as the FTC Dashboard.
*/
@Override
public void loop() {
follower.update();
if (follower.atParametricEnd()) {
follower.followPath(path, true);
}
follower.telemetryDebug(telemetryA);
}
}

View File

@@ -15,6 +15,7 @@ import com.qualcomm.robotcore.hardware.HardwareMap;
 import org.firstinspires.ftc.robotcore.external.Telemetry;
 import org.firstinspires.ftc.teamcode.pedroPathing.follower.Follower;
 import org.firstinspires.ftc.teamcode.states.FieldStates;
+import org.firstinspires.ftc.teamcode.subsystem.DualMotorSliderSubsystem;
 import org.firstinspires.ftc.teamcode.subsystem.MotorsSubsystem;
 public class CometBotAutoDevelopment {
@@ -22,121 +23,64 @@ public class CometBotAutoDevelopment {
     /*
        Subsystems
      */
-    private MotorsSubsystem motors;
+    private DualMotorSliderSubsystem dualSlides;
     /*
        Controllers
      */
-    public Gamepad GP1;
-    public Gamepad GP2;
-    public Gamepad currentGP1;
-    public Gamepad previousGP1;
-    public Gamepad currentGP2;
-    public Gamepad previousGP2;
-    private Telemetry telemetry;
-    public FieldStates fieldStates;
-    private boolean centricity = false;
+    public Gamepad gamepad1;
+    public Gamepad gamepad2;
+    public Gamepad currentGamepad1;
+    public Gamepad currentGamepad2;
+    public Gamepad previousGamepad1;
+    public Gamepad previousGamepad2;
     private Follower follower;
-    private HardwareMap hardwareMap;
-    public CometBotAutoDevelopment(HardwareMap hardwareMap, Telemetry telemetry, Gamepad gp1, Gamepad gp2) {
-        this.motors = new MotorsSubsystem(hardwareMap, telemetry);
-        this.GP1 = gp1;
-        this.GP2 = gp2;
-        this.hardwareMap = hardwareMap;
-        this.telemetry = telemetry;
-        this.currentGP1 = new Gamepad();
-        this.currentGP2 = new Gamepad();
-        this.previousGP1 = new Gamepad();
-        this.previousGP2 = new Gamepad();
-        this.fieldStates = new FieldStates();
-        this.follower = new Follower(hardwareMap);
-    }
-    public class ZeroOutPower implements Action {
-        @Override
-        public boolean run(@NonNull TelemetryPacket telemetryPacket) {
-            follower = new Follower(hardwareMap);
-            follower.setMaxPower(0);
-            System.out.println("Running ZeroOutPower");
-            return follower.isBusy();
-        }
-    }
-    public class ReturnToMaxPower implements Action {
-        @Override
-        public boolean run(@NonNull TelemetryPacket telemetryPacket) {
-            follower = new Follower(hardwareMap);
-            follower.setMaxPower(MAX_POWER);
-            follower.startTeleopDrive();
-            System.out.println("Running ReturnToMaxPower");
-            return follower.isBusy();
-        }
-    }
-    public Action zeroOutPower() {
-        return new ZeroOutPower();
-    }
-    public Action returnToMaxPower() {
-        return new ReturnToMaxPower();
+    public CometBotAutoDevelopment(HardwareMap hardwareMap, Gamepad gamepad1, Gamepad gamepad2) {
+        dualSlides = new DualMotorSliderSubsystem(hardwareMap);
+        this.gamepad1 = gamepad1;
+        this.gamepad2 = gamepad2;
+        currentGamepad1 = new Gamepad();
+        currentGamepad2 = new Gamepad();
+        previousGamepad1 = new Gamepad();
+        previousGamepad2 = new Gamepad();
+        follower = new Follower(hardwareMap);
     }
     public void init() {
-        this.motors.init();
-        this.fieldStates.setFieldLocation(FieldStates.FieldLocation.TRAVELING);
+        dualSlides.init();
         follower.setMaxPower(MAX_POWER);
         follower.startTeleopDrive();
     }
     public void update() {
-        this.previousGP1.copy(currentGP1);
-        this.currentGP1.copy(this.GP1);
-        this.previousGP2.copy(currentGP2);
-        this.currentGP2.copy(this.GP2);
-        this.toFixMotorBlockingIssueFirstMethod();
-        this.toFixMotorBlockingIssueSecondMethod();
-        this.changeCentricity();
-        follower.setTeleOpMovementVectors(-this.GP1.left_stick_y, -this.GP1.left_stick_x, -this.GP1.right_stick_x, centricity);
+        previousGamepad1.copy(currentGamepad1);
+        currentGamepad1.copy(gamepad1);
+        previousGamepad2.copy(currentGamepad2);
+        currentGamepad2.copy(gamepad2);
+        /*
+            Check if dpad_up/down is being pressed for slides
+         */
+        dualSlides.update();
+        dualSlidesToLowBucketPosition();
+        dualSlidesToHighBucketPosition();
+        follower.setTeleOpMovementVectors(-gamepad1.left_stick_y, -gamepad1.left_stick_x, -gamepad1.right_stick_x);
         follower.update();
-        this.telemetry.addData("Field State", this.fieldStates.getFieldLocation());
     }
-    public void changeCentricity() {
-        if (this.currentGP1.left_bumper && !this.previousGP1.left_bumper) {
-            this.centricity = !centricity;
-            this.follower.breakFollowing();
-            this.follower.startTeleopDrive();
+    private void dualSlidesToHighBucketPosition() {
+        if (currentGamepad1.dpad_up && !previousGamepad1.dpad_up) {
+            dualSlides.toHighBucketPosition();
         }
     }
-    public void toFixMotorBlockingIssueFirstMethod() {
-        if (this.currentGP1.cross && !this.previousGP1.cross) {
-            fieldStates.setFieldLocation(FieldStates.FieldLocation.BUCKET);
-            Actions.runBlocking(new SequentialAction(
-                    this.zeroOutPower(),
-                    new SleepAction(3),
-                    this.returnToMaxPower()
-            ));
-            fieldStates.setFieldLocation(FieldStates.FieldLocation.TRAVELING);
+    private void dualSlidesToLowBucketPosition() {
+        if (currentGamepad1.dpad_down && !previousGamepad1.dpad_down) {
+            dualSlides.toLowBucketPosition();
         }
     }
-    public void toFixMotorBlockingIssueSecondMethod() {
-        if (this.currentGP1.circle && !this.previousGP1.circle) {
-            this.follower.breakFollowing();
-            fieldStates.setFieldLocation(FieldStates.FieldLocation.BUCKET);
-            Actions.runBlocking(new SequentialAction(
-                    new SleepAction(3)
-            ));
-            fieldStates.setFieldLocation(FieldStates.FieldLocation.TRAVELING);
-            this.follower.startTeleopDrive();
-        }
-    }
 }
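
The rewritten CometBotAutoDevelopment above delegates slide control to DualMotorSliderSubsystem, which is not part of this diff; from the call sites, its public surface is at least init(), update(), toLowBucketPosition() and toHighBucketPosition(), and the Gamepad.copy() snapshots in update() make each dpad press issue a slide command exactly once. A hedged sketch of what such a run-to-position subsystem could look like (class name, encoder targets, and motor modes are assumptions; only the method names and the "lift-slide-left"/"lift-slide-right" device names come from this changeset):

    import com.qualcomm.robotcore.hardware.DcMotor;
    import com.qualcomm.robotcore.hardware.DcMotorEx;
    import com.qualcomm.robotcore.hardware.HardwareMap;

    // Hypothetical sketch inferred from the call sites above, not the repo's actual class.
    class DualMotorSliderSketch {
        private final DcMotorEx left, right;
        private int targetTicks = 0; // placeholder encoder targets below

        DualMotorSliderSketch(HardwareMap hardwareMap) {
            left = hardwareMap.get(DcMotorEx.class, "lift-slide-left");
            right = hardwareMap.get(DcMotorEx.class, "lift-slide-right");
        }

        void init() {
            left.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
            right.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
        }

        void toLowBucketPosition()  { targetTicks = 1500; } // placeholder value
        void toHighBucketPosition() { targetTicks = 3000; } // placeholder value

        // Called every loop; drives both motors toward the current target.
        void update() {
            left.setTargetPosition(targetTicks);
            right.setTargetPosition(targetTicks);
            left.setMode(DcMotor.RunMode.RUN_TO_POSITION);
            right.setMode(DcMotor.RunMode.RUN_TO_POSITION);
            left.setPower(1.0);
            right.setPower(1.0);
        }
    }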

View File

@@ -1,135 +0,0 @@
package org.firstinspires.ftc.teamcode.cometbots.projects;
import com.acmerobotics.dashboard.FtcDashboard;
import com.acmerobotics.dashboard.config.Config;
import com.acmerobotics.dashboard.telemetry.MultipleTelemetry;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import org.firstinspires.ftc.robotcore.external.Telemetry;
import org.firstinspires.ftc.teamcode.pedroPathing.follower.Follower;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierCurve;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierLine;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathChain;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Point;
/**
* This is the Circle autonomous OpMode. It runs the robot in a PathChain that's actually not quite
* a circle, but some Bezier curves that have control points set essentially in a square. However,
* it turns enough to tune your centripetal force correction and some of your heading. Some lag in
* heading is to be expected.
*
* @author Anyi Lin - 10158 Scott's Bots
* @author Aaron Yang - 10158 Scott's Bots
* @author Harrison Womack - 10158 Scott's Bots
* @version 1.0, 3/12/2024
*/
@Config
@Autonomous(name = "AsherPathV1", group = "Autonomous Pathing Tuning")
public class AsherPathV1 extends OpMode {
private Telemetry telemetryA;
private Follower follower;
private PathChain path;
private final Pose startPose = new Pose(10.0, 40, 90);
/**
* This initializes the Follower and creates the PathChain for the "circle". Additionally, this
* initializes the FTC Dashboard telemetry.
*/
@Override
public void init() {
follower = new Follower(hardwareMap);
follower.setMaxPower(.4);
follower.setStartingPose(startPose);
path = follower.pathBuilder()
/*
* Only update this path
*/
.addPath(
// Line 1
new BezierCurve(
new Point(9.757, 84.983, Point.CARTESIAN),
new Point(33.000, 105.000, Point.CARTESIAN),
new Point(80.000, 118.000, Point.CARTESIAN),
new Point(55.000, 120.000, Point.CARTESIAN)
)
)
.addPath(
// Line 2
new BezierCurve(
new Point(55.000, 120.000, Point.CARTESIAN),
new Point(22.000, 106.000, Point.CARTESIAN),
new Point(11.000, 131.000, Point.CARTESIAN)
)
)
.addPath(
// Line 3
new BezierCurve(
new Point(11.000, 131.000, Point.CARTESIAN),
new Point(75.000, 95.000, Point.CARTESIAN),
new Point(112.000, 132.000, Point.CARTESIAN),
new Point(61.000, 131.000, Point.CARTESIAN)
)
)
.addPath(
// Line 4
new BezierLine(
new Point(61.000, 131.000, Point.CARTESIAN),
new Point(11.000, 131.000, Point.CARTESIAN)
)
)
.addPath(
// Line 5
new BezierCurve(
new Point(11.000, 131.000, Point.CARTESIAN),
new Point(100.000, 118.000, Point.CARTESIAN),
new Point(103.000, 135.000, Point.CARTESIAN),
new Point(61.000, 135.000, Point.CARTESIAN)
)
)
.addPath(
// Line 6
new BezierLine(
new Point(61.000, 135.000, Point.CARTESIAN),
new Point(11.000, 131.000, Point.CARTESIAN)
)
)
.addPath(
// Line 7
new BezierCurve(
new Point(11.000, 131.000, Point.CARTESIAN),
new Point(113.000, 95.000, Point.CARTESIAN),
new Point(67.000, 95.000, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90)).build();
/*
* End of only update this path
*/
follower.followPath(path);
telemetryA = new MultipleTelemetry(this.telemetry, FtcDashboard.getInstance().getTelemetry());
telemetryA.update();
}
/**
* This runs the OpMode, updating the Follower as well as printing out the debug statements to
* the Telemetry, as well as the FTC Dashboard.
*/
@Override
public void loop() {
follower.update();
if (follower.atParametricEnd()) {
follower.followPath(path);
}
follower.telemetryDebug(telemetryA);
}
}

View File

@@ -1,79 +0,0 @@
package org.firstinspires.ftc.teamcode.cometbots.projects;
import com.acmerobotics.dashboard.FtcDashboard;
import com.acmerobotics.dashboard.config.Config;
import com.acmerobotics.dashboard.telemetry.MultipleTelemetry;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import org.firstinspires.ftc.robotcore.external.Telemetry;
import org.firstinspires.ftc.teamcode.pedroPathing.follower.Follower;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierLine;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathChain;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Point;
/**
* This is the Circle autonomous OpMode. It runs the robot in a PathChain that's actually not quite
* a circle, but some Bezier curves that have control points set essentially in a square. However,
* it turns enough to tune your centripetal force correction and some of your heading. Some lag in
* heading is to be expected.
*
* @author Anyi Lin - 10158 Scott's Bots
* @author Aaron Yang - 10158 Scott's Bots
* @author Harrison Womack - 10158 Scott's Bots
* @version 1.0, 3/12/2024
*/
@Config
@Autonomous(name = "AutoExample - Straight Path", group = "Autonomous Pathing Tuning")
public class AutoExample extends OpMode {
private Telemetry telemetryA;
private Follower follower;
private PathChain path;
private final Pose startPose = new Pose(0.0, 20.0, 0);
/**
* This initializes the Follower and creates the PathChain for the "circle". Additionally, this
* initializes the FTC Dashboard telemetry.
*/
@Override
public void init() {
follower = new Follower(hardwareMap);
follower.setMaxPower(.6);
follower.setStartingPose(startPose);
path = follower.pathBuilder()
.addPath(
// Line 1
new BezierLine(
new Point(0.000, 20.000, Point.CARTESIAN),
new Point(50.000, 20.000, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(0))
.build();
follower.followPath(path);
telemetryA = new MultipleTelemetry(this.telemetry, FtcDashboard.getInstance().getTelemetry());
telemetryA.update();
}
/**
* This runs the OpMode, updating the Follower as well as printing out the debug statements to
* the Telemetry, as well as the FTC Dashboard.
*/
@Override
public void loop() {
follower.update();
if (follower.atParametricEnd()) {
follower.followPath(path);
}
follower.telemetryDebug(telemetryA);
}
}

View File

@@ -1,106 +0,0 @@
package org.firstinspires.ftc.teamcode.cometbots.projects;
import com.acmerobotics.dashboard.FtcDashboard;
import com.acmerobotics.dashboard.config.Config;
import com.acmerobotics.dashboard.telemetry.MultipleTelemetry;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import org.firstinspires.ftc.robotcore.external.Telemetry;
import org.firstinspires.ftc.teamcode.pedroPathing.follower.Follower;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierCurve;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierLine;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathChain;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Point;
/**
* This is the Circle autonomous OpMode. It runs the robot in a PathChain that's actually not quite
* a circle, but some Bezier curves that have control points set essentially in a square. However,
* it turns enough to tune your centripetal force correction and some of your heading. Some lag in
* heading is to be expected.
*
* @author Anyi Lin - 10158 Scott's Bots
* @author Aaron Yang - 10158 Scott's Bots
* @author Harrison Womack - 10158 Scott's Bots
* @version 1.0, 3/12/2024
*/
@Config
@Autonomous(name = "AutoExample - 2 Curves/2 Lines", group = "Autonomous Pathing Tuning")
public class AutoExampleFour extends OpMode {
private Telemetry telemetryA;
private Follower follower;
private PathChain path;
private final Pose startPose = new Pose(12,60, 0);
/**
* This initializes the Follower and creates the PathChain for the "circle". Additionally, this
* initializes the FTC Dashboard telemetry.
*/
@Override
public void init() {
follower = new Follower(hardwareMap);
follower.setMaxPower(.45);
follower.setStartingPose(startPose);
path = follower.pathBuilder()
.addPath(
// Line 1
new BezierCurve(
new Point(12.000, 60.000, Point.CARTESIAN),
new Point(60.000, 60.000, Point.CARTESIAN),
new Point(60.000, 12.000, Point.CARTESIAN)
)
)
.setLinearHeadingInterpolation(Math.toRadians(0), Math.toRadians(-90))
.addPath(
// Line 2
new BezierLine(
new Point(60.000, 12.000, Point.CARTESIAN),
new Point(40.000, 12.000, Point.CARTESIAN)
)
)
.setLinearHeadingInterpolation(Math.toRadians(-90), Math.toRadians(-90))
.addPath(
// Line 3
new BezierCurve(
new Point(40.000, 12.000, Point.CARTESIAN),
new Point(35.000, 35.000, Point.CARTESIAN),
new Point(12.000, 35.000, Point.CARTESIAN)
)
)
.setLinearHeadingInterpolation(Math.toRadians(-90), Math.toRadians(-90))
.addPath(
// Line 4
new BezierLine(
new Point(12.000, 35.000, Point.CARTESIAN),
new Point(12.000, 60.000, Point.CARTESIAN)
)
)
.setLinearHeadingInterpolation(Math.toRadians(-90), Math.toRadians(0))
.build();
follower.followPath(path);
telemetryA = new MultipleTelemetry(this.telemetry, FtcDashboard.getInstance().getTelemetry());
telemetryA.update();
}
/**
* This runs the OpMode, updating the Follower as well as printing out the debug statements to
* the Telemetry, as well as the FTC Dashboard.
*/
@Override
public void loop() {
follower.update();
if (follower.atParametricEnd()) {
follower.followPath(path);
}
follower.telemetryDebug(telemetryA);
}
}

View File

@@ -1,142 +0,0 @@
package org.firstinspires.ftc.teamcode.cometbots.projects;
import com.acmerobotics.dashboard.FtcDashboard;
import com.acmerobotics.dashboard.config.Config;
import com.acmerobotics.dashboard.telemetry.MultipleTelemetry;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import org.firstinspires.ftc.robotcore.external.Telemetry;
import org.firstinspires.ftc.teamcode.pedroPathing.follower.Follower;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierLine;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathChain;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Point;
/**
* This is the Circle autonomous OpMode. It runs the robot in a PathChain that's actually not quite
* a circle, but some Bezier curves that have control points set essentially in a square. However,
* it turns enough to tune your centripetal force correction and some of your heading. Some lag in
* heading is to be expected.
*
* @author Anyi Lin - 10158 Scott's Bots
* @author Aaron Yang - 10158 Scott's Bots
* @author Harrison Womack - 10158 Scott's Bots
* @version 1.0, 3/12/2024
*/
@Config
@Autonomous(name = "AutoExampleSeason2025V1", group = "Autonomous Pathing Tuning")
public class AutoExampleSeason2025V1 extends OpMode {
private Telemetry telemetryA;
private Follower follower;
private PathChain path;
private final Pose startPose = new Pose(15.0, 35, 90);
/**
* This initializes the Follower and creates the PathChain for the "circle". Additionally, this
* initializes the FTC Dashboard telemetry.
*/
@Override
public void init() {
follower = new Follower(hardwareMap);
follower.setMaxPower(.375);
follower.setStartingPose(startPose);
path = follower.pathBuilder()
.addPath(
// Line 1
new BezierLine(
new Point(15.000, 35.000, Point.CARTESIAN),
new Point(60.000, 35.000, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 2
new BezierLine(
new Point(60.000, 35.000, Point.CARTESIAN),
new Point(60.000, 25.000, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 3
new BezierLine(
new Point(60.000, 25.000, Point.CARTESIAN),
new Point(15.000, 25.000, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 4
new BezierLine(
new Point(15.000, 25.000, Point.CARTESIAN),
new Point(60.000, 25.000, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 5
new BezierLine(
new Point(60.000, 25.000, Point.CARTESIAN),
new Point(60.000, 15.000, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 6
new BezierLine(
new Point(60.000, 15.000, Point.CARTESIAN),
new Point(15.000, 15.000, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 7
new BezierLine(
new Point(15.000, 15.000, Point.CARTESIAN),
new Point(60.000, 15.000, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 8
new BezierLine(
new Point(60.000, 15.000, Point.CARTESIAN),
new Point(60.000, 8.000, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 9
new BezierLine(
new Point(60.000, 8.000, Point.CARTESIAN),
new Point(15.000, 8.000, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90)).build();
follower.followPath(path);
telemetryA = new MultipleTelemetry(this.telemetry, FtcDashboard.getInstance().getTelemetry());
telemetryA.update();
}
/**
* This runs the OpMode, updating the Follower as well as printing out the debug statements to
* the Telemetry, as well as the FTC Dashboard.
*/
@Override
public void loop() {
follower.update();
if (follower.atParametricEnd()) {
follower.followPath(path);
}
follower.telemetryDebug(telemetryA);
}
}

View File

@@ -1,89 +0,0 @@
package org.firstinspires.ftc.teamcode.cometbots.projects;
import com.acmerobotics.dashboard.FtcDashboard;
import com.acmerobotics.dashboard.config.Config;
import com.acmerobotics.dashboard.telemetry.MultipleTelemetry;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import org.firstinspires.ftc.robotcore.external.Telemetry;
import org.firstinspires.ftc.teamcode.pedroPathing.follower.Follower;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierCurve;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierLine;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathChain;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Point;
/**
* This is the Circle autonomous OpMode. It runs the robot in a PathChain that's actually not quite
* a circle, but some Bezier curves that have control points set essentially in a square. However,
* it turns enough to tune your centripetal force correction and some of your heading. Some lag in
* heading is to be expected.
*
* @author Anyi Lin - 10158 Scott's Bots
* @author Aaron Yang - 10158 Scott's Bots
* @author Harrison Womack - 10158 Scott's Bots
* @version 1.0, 3/12/2024
*/
@Config
@Autonomous(name = "AutoExample - Curve and Line", group = "Autonomous Pathing Tuning")
public class AutoExampleThree extends OpMode {
private Telemetry telemetryA;
private Follower follower;
private PathChain path;
private final Pose startPose = new Pose(10,45, 0);
/**
* This initializes the Follower and creates the PathChain for the "circle". Additionally, this
* initializes the FTC Dashboard telemetry.
*/
@Override
public void init() {
follower = new Follower(hardwareMap);
follower.setMaxPower(.4);
follower.setStartingPose(startPose);
path = follower.pathBuilder()
.addPath(
// Line 1
new BezierCurve(
new Point(10.000, 45.000, Point.CARTESIAN),
new Point(45.000, 45.000, Point.CARTESIAN),
new Point(50.000, 20.000, Point.CARTESIAN)
)
)
.setLinearHeadingInterpolation(Math.toRadians(0), Math.toRadians(-90))
.addPath(
// Line 2
new BezierLine(
new Point(50.000, 20.000, Point.CARTESIAN),
new Point(10.000, 20.000, Point.CARTESIAN)
)
)
.setLinearHeadingInterpolation(Math.toRadians(-90), Math.toRadians(-90))
.build();
follower.followPath(path);
telemetryA = new MultipleTelemetry(this.telemetry, FtcDashboard.getInstance().getTelemetry());
telemetryA.update();
}
/**
* This runs the OpMode, updating the Follower as well as printing out the debug statements to
* the Telemetry, as well as the FTC Dashboard.
*/
@Override
public void loop() {
follower.update();
if (follower.atParametricEnd()) {
follower.followPath(path);
}
follower.telemetryDebug(telemetryA);
}
}

View File

@@ -1,80 +0,0 @@
package org.firstinspires.ftc.teamcode.cometbots.projects;
import com.acmerobotics.dashboard.FtcDashboard;
import com.acmerobotics.dashboard.config.Config;
import com.acmerobotics.dashboard.telemetry.MultipleTelemetry;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import org.firstinspires.ftc.robotcore.external.Telemetry;
import org.firstinspires.ftc.teamcode.pedroPathing.follower.Follower;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierCurve;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathChain;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Point;
/**
* This is the Circle autonomous OpMode. It runs the robot in a PathChain that's actually not quite
* a circle, but some Bezier curves that have control points set essentially in a square. However,
* it turns enough to tune your centripetal force correction and some of your heading. Some lag in
* heading is to be expected.
*
* @author Anyi Lin - 10158 Scott's Bots
* @author Aaron Yang - 10158 Scott's Bots
* @author Harrison Womack - 10158 Scott's Bots
* @version 1.0, 3/12/2024
*/
@Config
@Autonomous(name = "AutoExample - Simple Curve", group = "Autonomous Pathing Tuning")
public class AutoExampleTwo extends OpMode {
private Telemetry telemetryA;
private Follower follower;
private PathChain path;
private final Pose startPose = new Pose(10.0, 45, 0);
/**
* This initializes the Follower and creates the PathChain for the "circle". Additionally, this
* initializes the FTC Dashboard telemetry.
*/
@Override
public void init() {
follower = new Follower(hardwareMap);
follower.setMaxPower(.4);
follower.setStartingPose(startPose);
path = follower.pathBuilder()
.addPath(
// Line 1
new BezierCurve(
new Point(10.000, 45.000, Point.CARTESIAN),
new Point(45.000, 45.000, Point.CARTESIAN),
new Point(50.000, 20.000, Point.CARTESIAN)
)
)
.setLinearHeadingInterpolation(Math.toRadians(0), Math.toRadians(-90))
.build();
follower.followPath(path);
telemetryA = new MultipleTelemetry(this.telemetry, FtcDashboard.getInstance().getTelemetry());
telemetryA.update();
}
/**
* This runs the OpMode, updating the Follower as well as printing out the debug statements to
* the Telemetry, as well as the FTC Dashboard.
*/
@Override
public void loop() {
follower.update();
if (follower.atParametricEnd()) {
follower.followPath(path);
}
follower.telemetryDebug(telemetryA);
}
}

View File

@@ -1,91 +0,0 @@
package org.firstinspires.ftc.teamcode.cometbots.projects;
import com.acmerobotics.dashboard.FtcDashboard;
import com.acmerobotics.dashboard.config.Config;
import com.acmerobotics.dashboard.telemetry.MultipleTelemetry;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import org.firstinspires.ftc.robotcore.external.Telemetry;
import org.firstinspires.ftc.teamcode.pedroPathing.follower.Follower;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierCurve;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierLine;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathChain;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Point;
/**
* This is the Circle autonomous OpMode. It runs the robot in a PathChain that's actually not quite
* a circle, but some Bezier curves that have control points set essentially in a square. However,
* it turns enough to tune your centripetal force correction and some of your heading. Some lag in
* heading is to be expected.
*
* @author Anyi Lin - 10158 Scott's Bots
* @author Aaron Yang - 10158 Scott's Bots
* @author Harrison Womack - 10158 Scott's Bots
* @version 1.0, 3/12/2024
*/
@Config
@Autonomous (name = "Test", group = "Autonomous Pathing Tuning")
public class AutoTest extends OpMode {
private Telemetry telemetryA;
private Follower follower;
private PathChain test;
@Override
public void init() {
follower = new Follower(hardwareMap);
test = follower.pathBuilder()
.addPath(
new BezierLine(
new Point(8.000, 60.000, Point.CARTESIAN),
new Point(18.000, 60.000, Point.CARTESIAN)
)
)
.addPath(
// Line 2
new BezierCurve(
new Point(18.000, 60.000, Point.CARTESIAN),
new Point(18.000, 23.000, Point.CARTESIAN),
new Point(48.000, 23.000, Point.CARTESIAN)
)
)
.addPath(
// Line 3
new BezierLine(
new Point(48.000, 23.000, Point.CARTESIAN),
new Point(60.000, 36.000, Point.CARTESIAN)
)
)
.addPath(
// Line 4
new BezierLine(
new Point(60.000, 36.000, Point.CARTESIAN),
new Point(60.000, 49.000, Point.CARTESIAN)
)
).build();
follower.followPath(test);
telemetryA = new MultipleTelemetry(this.telemetry, FtcDashboard.getInstance().getTelemetry());
telemetryA.update();
}
@Override
public void loop() {
follower.update();
if (follower.atParametricEnd()) {
follower.followPath(test);
}
follower.telemetryDebug(telemetryA);
}
}

View File

@@ -1,128 +0,0 @@
package org.firstinspires.ftc.teamcode.cometbots.projects;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierLine;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathBuilder;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Point;
public class BlueAuto {
public void GeneratedPath() {
PathBuilder builder = new PathBuilder();
builder
.addPath(
// Line 1
new BezierLine(
new Point(9.757, 84.983, Point.CARTESIAN),
new Point(8.442, 129.227, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 2
new BezierLine(
new Point(8.442, 129.227, Point.CARTESIAN),
new Point(52.762, 101.628, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 3
new BezierLine(
new Point(52.762, 101.628, Point.CARTESIAN),
new Point(79.224, 116.564, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 4
new BezierLine(
new Point(79.224, 116.564, Point.CARTESIAN),
new Point(54.548, 130.525, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 5
new BezierLine(
new Point(54.548, 130.525, Point.CARTESIAN),
new Point(12.338, 133.772, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 6
new BezierLine(
new Point(12.338, 133.772, Point.CARTESIAN),
new Point(52.437, 101.628, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 7
new BezierLine(
new Point(52.437, 101.628, Point.CARTESIAN),
new Point(71.594, 120.947, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 8
new BezierLine(
new Point(71.594, 120.947, Point.CARTESIAN),
new Point(52.275, 120.785, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 9
new BezierLine(
new Point(52.275, 120.785, Point.CARTESIAN),
new Point(11.039, 131.012, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 10
new BezierLine(
new Point(11.039, 131.012, Point.CARTESIAN),
new Point(70.782, 120.460, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 11
new BezierLine(
new Point(70.782, 120.460, Point.CARTESIAN),
new Point(50.327, 142.377, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 12
new BezierLine(
new Point(50.327, 142.377, Point.CARTESIAN),
new Point(13.799, 134.422, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 13
new BezierLine(
new Point(13.799, 134.422, Point.CARTESIAN),
new Point(13.799, 134.422, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 14
new BezierLine(
new Point(13.799, 134.422, Point.CARTESIAN),
new Point(71.919, 103.901, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation();
}
}

View File

@@ -1,161 +0,0 @@
package org.firstinspires.ftc.teamcode.cometbots.projects;
import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import com.acmerobotics.dashboard.FtcDashboard;
import com.acmerobotics.dashboard.config.Config;
import com.acmerobotics.dashboard.telemetry.MultipleTelemetry;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import org.firstinspires.ftc.robotcore.external.Telemetry;
import org.firstinspires.ftc.teamcode.pedroPathing.follower.Follower;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierCurve;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierLine;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathChain;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Point;
/**
* This is the Circle autonomous OpMode. It runs the robot in a PathChain that's actually not quite
* a circle, but some Bezier curves that have control points set essentially in a square. However,
* it turns enough to tune your centripetal force correction and some of your heading. Some lag in
* heading is to be expected.
*
* @author Anyi Lin - 10158 Scott's Bots
* @author Aaron Yang - 10158 Scott's Bots
* @author Harrison Womack - 10158 Scott's Bots
* @version 1.0, 3/12/2024
*/
@Config
@Autonomous(name = "BlueBasketAuto", group = "Autonomous Pathing Tuning")
public class BlueBasketAuto extends OpMode {
private Telemetry telemetryA;
private Follower follower;
private PathChain path;
private final Pose startPose = new Pose(11.25, 95.75, 0);
/**
* This initializes the Follower and creates the PathChain for the "circle". Additionally, this
* initializes the FTC Dashboard telemetry.
*/
@Override
public void init() {
follower = new Follower(hardwareMap);
follower.setMaxPower(.45);
follower.setStartingPose(startPose);
path = follower.pathBuilder()
.addPath(
// Line 1
new BezierLine(
new Point(11.250, 95.750, Point.CARTESIAN),
new Point(37.000, 108.000, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(0))
.addPath(
// Line 2
new BezierCurve(
new Point(37.000, 108.000, Point.CARTESIAN),
new Point(73.286, 111.536, Point.CARTESIAN),
new Point(67.821, 120.536, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(0))
.addPath(
// Line 3
new BezierLine(
new Point(67.821, 120.536, Point.CARTESIAN),
new Point(28.000, 121.500, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(0))
.addPath(
// Line 4
new BezierLine(
new Point(28.000, 121.500, Point.CARTESIAN),
new Point(18.000, 130.179, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(0))
.addPath(
// Line 5
new BezierCurve(
new Point(18.000, 130.179, Point.CARTESIAN),
new Point(59.000, 102.500, Point.CARTESIAN),
new Point(68.700, 130.500, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(0))
.addPath(
// Line 6
new BezierLine(
new Point(68.700, 130.500, Point.CARTESIAN),
new Point(18.000, 130.339, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(0))
.addPath(
// Line 7
new BezierCurve(
new Point(18.000, 130.339, Point.CARTESIAN),
new Point(49.018, 121.179, Point.CARTESIAN),
new Point(63.804, 135.321, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(0))
.addPath(
// Line 8
new BezierLine(
new Point(63.804, 135.321, Point.CARTESIAN),
new Point(53.036, 135.161, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(0))
.addPath(
// Line 9
new BezierLine(
new Point(53.036, 135.161, Point.CARTESIAN),
new Point(18.643, 135.000, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(0))
.addPath(
// Line 10
new BezierLine(
new Point(18.643, 135.000, Point.CARTESIAN),
new Point(72.300, 97.400, Point.CARTESIAN)
)
)
.addPath(
// Line 9
new BezierLine(
new Point(18.643, 135.000, Point.CARTESIAN),
new Point(83.250, 95.464, Point.CARTESIAN)
)
)
.setLinearHeadingInterpolation(Math.toRadians(0), Math.toRadians(270)).build();
follower.followPath(path, true);
telemetryA = new MultipleTelemetry(this.telemetry, FtcDashboard.getInstance().getTelemetry());
telemetryA.update();
}
/**
* This runs the OpMode, updating the Follower as well as printing out the debug statements to
* the Telemetry, as well as the FTC Dashboard.
*/
@Override
public void loop() {
follower.update();
if (follower.atParametricEnd()) {
follower.followPath(path, true);
}
follower.telemetryDebug(telemetryA);
}
}

View File

@@ -1,246 +0,0 @@
package org.firstinspires.ftc.teamcode.cometbots.projects;
import com.acmerobotics.dashboard.FtcDashboard;
import com.acmerobotics.dashboard.config.Config;
import com.acmerobotics.dashboard.telemetry.MultipleTelemetry;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import org.firstinspires.ftc.robotcore.external.Telemetry;
import org.firstinspires.ftc.teamcode.pedroPathing.follower.Follower;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierLine;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathChain;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Point;
/**
* This is the Circle autonomous OpMode. It runs the robot in a PathChain that's actually not quite
* a circle, but some Bezier curves that have control points set essentially in a square. However,
* it turns enough to tune your centripetal force correction and some of your heading. Some lag in
* heading is to be expected.
*
* @author Anyi Lin - 10158 Scott's Bots
* @author Aaron Yang - 10158 Scott's Bots
* @author Harrison Womack - 10158 Scott's Bots
* @version 1.0, 3/12/2024
*/
@Config
@Autonomous(name = "BluebAutoV1", group = "Autonomous Pathing Tuning")
public class BluebAutoV1 extends OpMode {
private Telemetry telemetryA;
private Follower follower;
private PathChain path;
private final Pose startPose = new Pose(7.5, 72, 90);
/**
* This initializes the Follower and creates the PathChain for the "circle". Additionally, this
* initializes the FTC Dashboard telemetry.
*/
@Override
public void init() {
follower = new Follower(hardwareMap);
follower.setMaxPower(.4);
follower.setStartingPose(startPose);
path = follower.pathBuilder()
.addPath(
// Line 1
new BezierLine(
new Point(7.5, 72, Point.CARTESIAN),
new Point(29.893, 38.250, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 2
new BezierLine(
new Point(29.893, 38.250, Point.CARTESIAN),
new Point(65.250, 32.143, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 3
new BezierLine(
new Point(65.250, 32.143, Point.CARTESIAN),
new Point(61.714, 24.429, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 4
new BezierLine(
new Point(61.714, 24.429, Point.CARTESIAN),
new Point(13.821, 22.821, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 5
new BezierLine(
new Point(13.821, 22.821, Point.CARTESIAN),
new Point(61.714, 24.429, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 6
new BezierLine(
new Point(61.714, 24.429, Point.CARTESIAN),
new Point(60.750, 12.696, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 7
new BezierLine(
new Point(60.750, 12.696, Point.CARTESIAN),
new Point(12.375, 13.179, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 8
new BezierLine(
new Point(12.375, 13.179, Point.CARTESIAN),
new Point(60.750, 12.536, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 9
new BezierLine(
new Point(60.750, 12.536, Point.CARTESIAN),
new Point(60.589, 9.321, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 10
new BezierLine(
new Point(60.589, 9.321, Point.CARTESIAN),
new Point(12.536, 8.357, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 11
new BezierLine(
new Point(12.536, 8.357, Point.CARTESIAN),
new Point(26.679, 8.679, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 12
new BezierLine(
new Point(26.679, 8.679, Point.CARTESIAN),
new Point(22.821, 109.446, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 13
new BezierLine(
new Point(22.821, 109.446, Point.CARTESIAN),
new Point(70.714, 109.446, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 14
new BezierLine(
new Point(70.714, 109.446, Point.CARTESIAN),
new Point(71.036, 120.214, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 15
new BezierLine(
new Point(71.036, 120.214, Point.CARTESIAN),
new Point(22.179, 120.214, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 16
new BezierLine(
new Point(22.179, 120.214, Point.CARTESIAN),
new Point(11.089, 130.821, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 17
new BezierLine(
new Point(11.089, 130.821, Point.CARTESIAN),
new Point(70.714, 112.018, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 18
new BezierLine(
new Point(70.714, 112.018, Point.CARTESIAN),
new Point(70.714, 128.250, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 19
new BezierLine(
new Point(70.714, 128.250, Point.CARTESIAN),
new Point(9.964, 130.018, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 20
new BezierLine(
new Point(9.964, 130.018, Point.CARTESIAN),
new Point(70.554, 130.500, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 21
new BezierLine(
new Point(70.554, 130.500, Point.CARTESIAN),
new Point(70.393, 135.000, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90))
.addPath(
// Line 22
new BezierLine(
new Point(70.393, 135.000, Point.CARTESIAN),
new Point(13.821, 134.839, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(90)).build();
follower.followPath(path);
telemetryA = new MultipleTelemetry(this.telemetry, FtcDashboard.getInstance().getTelemetry());
telemetryA.update();
}
/**
* This runs the OpMode, updating the Follower as well as printing out the debug statements to
* the Telemetry, as well as the FTC Dashboard.
*/
@Override
public void loop() {
follower.update();
if (follower.atParametricEnd()) {
follower.followPath(path);
}
follower.telemetryDebug(telemetryA);
}
}

View File

@@ -1,4 +0,0 @@
package org.firstinspires.ftc.teamcode.cometbots.projects;
public class BluenbAutov1 {
}

View File

@@ -1,109 +0,0 @@
package org.firstinspires.ftc.teamcode.cometbots.projects;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierLine;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathBuilder;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Point;
public class GeneratedPath {
public GeneratedPath() {
PathBuilder builder = new PathBuilder();
builder
.addPath(
// Line 1
new BezierLine(
new Point(9.757, 84.983, Point.CARTESIAN),
new Point(28.573, 76.302, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 2
new BezierLine(
new Point(28.573, 76.302, Point.CARTESIAN),
new Point(36.203, 76.140, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 3
new BezierLine(
new Point(36.203, 76.140, Point.CARTESIAN),
new Point(35.067, 35.716, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 4
new BezierLine(
new Point(35.067, 35.716, Point.CARTESIAN),
new Point(73.705, 34.742, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 5
new BezierLine(
new Point(73.705, 34.742, Point.CARTESIAN),
new Point(73.705, 24.839, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 6
new BezierLine(
new Point(73.705, 24.839, Point.CARTESIAN),
new Point(7.630, 26.462, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 7
new BezierLine(
new Point(7.630, 26.462, Point.CARTESIAN),
new Point(64.126, 22.728, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 8
new BezierLine(
new Point(64.126, 22.728, Point.CARTESIAN),
new Point(63.964, 13.150, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 9
new BezierLine(
new Point(63.964, 13.150, Point.CARTESIAN),
new Point(12.338, 15.260, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 10
new BezierLine(
new Point(12.338, 15.260, Point.CARTESIAN),
new Point(63.802, 13.150, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 11
new BezierLine(
new Point(63.802, 13.150, Point.CARTESIAN),
new Point(63.639, 11.689, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 12
new BezierLine(
new Point(63.639, 11.689, Point.CARTESIAN),
new Point(12.014, 11.689, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation();
}
}

View File

@@ -1,99 +0,0 @@
package org.firstinspires.ftc.teamcode.cometbots.projects;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierLine;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathBuilder;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Point;
public class RedAuto {
public class GeneratedPath {
public GeneratedPath() {
PathBuilder builder = new PathBuilder();
builder
.addPath(
// Line 1
new BezierLine(
new Point(131.499, 58.931, Point.CARTESIAN),
new Point(131.986, 18.183, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 2
new BezierLine(
new Point(131.986, 18.183, Point.CARTESIAN),
new Point(90.264, 40.911, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 3
new BezierLine(
new Point(90.264, 40.911, Point.CARTESIAN),
new Point(83.445, 26.300, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 4
new BezierLine(
new Point(83.445, 26.300, Point.CARTESIAN),
new Point(136.207, 14.286, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 5
new BezierLine(
new Point(136.207, 14.286, Point.CARTESIAN),
new Point(81.497, 24.352, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 6
new BezierLine(
new Point(81.497, 24.352, Point.CARTESIAN),
new Point(82.634, 12.988, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 7
new BezierLine(
new Point(82.634, 12.988, Point.CARTESIAN),
new Point(133.935, 11.364, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 8
new BezierLine(
new Point(133.935, 11.364, Point.CARTESIAN),
new Point(82.309, 11.689, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 9
new BezierLine(
new Point(82.309, 11.689, Point.CARTESIAN),
new Point(83.445, 2.598, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 10
new BezierLine(
new Point(83.445, 2.598, Point.CARTESIAN),
new Point(132.149, 10.390, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation();
}
}
}

View File

@@ -1,185 +0,0 @@
/* Copyright (c) 2017 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.teamcode.cometbots.projects;
import com.qualcomm.hardware.bosch.BNO055IMU;
import com.qualcomm.hardware.bosch.JustLoggingAccelerationIntegrator;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import org.firstinspires.ftc.robotcore.external.Func;
import org.firstinspires.ftc.robotcore.external.navigation.Acceleration;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.robotcore.external.navigation.AxesOrder;
import org.firstinspires.ftc.robotcore.external.navigation.AxesReference;
import org.firstinspires.ftc.robotcore.external.navigation.Orientation;
import org.firstinspires.ftc.robotcore.external.navigation.Position;
import org.firstinspires.ftc.robotcore.external.navigation.Velocity;
import java.util.Locale;
/*
* This OpMode gives a short demo on how to use the BNO055 Inertial Motion Unit (IMU) from AdaFruit.
*
* Note: this is a Legacy example that will not work with newer Control/Expansion Hubs that use a different IMU
* Please use the new SensorIMUOrthogonal or SensorIMUNonOrthogonal samples for a more universal IMU interface.
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list
*
* @see <a href="http://www.adafruit.com/products/2472">Adafruit IMU</a>
*/
@TeleOp(name = "Sensor: BNO055 IMU", group = "Sensor")
public class SensorBNO055IMU extends LinearOpMode
{
//----------------------------------------------------------------------------------------------
// State
//----------------------------------------------------------------------------------------------
// The IMU sensor object
BNO055IMU imu;
// State used for updating telemetry
Orientation angles;
Acceleration gravity;
//----------------------------------------------------------------------------------------------
// Main logic
//----------------------------------------------------------------------------------------------
@Override public void runOpMode() {
// Set up the parameters with which we will use our IMU. Note that integration
// algorithm here just reports accelerations to the logcat log; it doesn't actually
// provide positional information.
BNO055IMU.Parameters parameters = new BNO055IMU.Parameters();
parameters.angleUnit = BNO055IMU.AngleUnit.DEGREES;
parameters.accelUnit = BNO055IMU.AccelUnit.METERS_PERSEC_PERSEC;
parameters.calibrationDataFile = "BNO055IMUCalibration.json"; // see the calibration sample OpMode
parameters.loggingEnabled = true;
parameters.loggingTag = "IMU";
parameters.accelerationIntegrationAlgorithm = new JustLoggingAccelerationIntegrator();
// Retrieve and initialize the IMU. We expect the IMU to be attached to an I2C port
// on a Core Device Interface Module, configured to be a sensor of type "AdaFruit IMU",
// and named "imu".
imu = hardwareMap.get(BNO055IMU.class, "adafruit_imu");
imu.initialize(parameters);
// Set up our telemetry dashboard
composeTelemetry();
// Wait until we're told to go
waitForStart();
// Start the logging of measured acceleration
imu.startAccelerationIntegration(new Position(), new Velocity(), 1000);
// Loop and update the dashboard
while (opModeIsActive()) {
telemetry.update();
}
}
//----------------------------------------------------------------------------------------------
// Telemetry Configuration
//----------------------------------------------------------------------------------------------
void composeTelemetry() {
// At the beginning of each telemetry update, grab a bunch of data
// from the IMU that we will then display in separate lines.
telemetry.addAction(new Runnable() { @Override public void run()
{
// Acquiring the angles is relatively expensive; we don't want
// to do that in each of the three items that need that info, as that's
// three times the necessary expense.
angles = imu.getAngularOrientation(AxesReference.INTRINSIC, AxesOrder.ZYX, AngleUnit.DEGREES);
gravity = imu.getGravity();
}
});
telemetry.addLine()
.addData("status", new Func<String>() {
@Override public String value() {
return imu.getSystemStatus().toShortString();
}
})
.addData("calib", new Func<String>() {
@Override public String value() {
return imu.getCalibrationStatus().toString();
}
});
telemetry.addLine()
.addData("heading", new Func<String>() {
@Override public String value() {
return formatAngle(angles.angleUnit, angles.firstAngle);
}
})
.addData("roll", new Func<String>() {
@Override public String value() {
return formatAngle(angles.angleUnit, angles.secondAngle);
}
})
.addData("pitch", new Func<String>() {
@Override public String value() {
return formatAngle(angles.angleUnit, angles.thirdAngle);
}
});
telemetry.addLine()
.addData("grvty", new Func<String>() {
@Override public String value() {
return gravity.toString();
}
})
.addData("mag", new Func<String>() {
@Override public String value() {
return String.format(Locale.getDefault(), "%.3f",
Math.sqrt(gravity.xAccel*gravity.xAccel
+ gravity.yAccel*gravity.yAccel
+ gravity.zAccel*gravity.zAccel));
}
});
}
//----------------------------------------------------------------------------------------------
// Formatting
//----------------------------------------------------------------------------------------------
String formatAngle(AngleUnit angleUnit, double angle) {
return formatDegrees(AngleUnit.DEGREES.fromUnit(angleUnit, angle));
}
String formatDegrees(double degrees){
return String.format(Locale.getDefault(), "%.1f", AngleUnit.DEGREES.normalize(degrees));
}
}

View File

@ -1,229 +0,0 @@
/* Copyright (c) 2017 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.teamcode.cometbots.projects;
import com.qualcomm.hardware.bosch.BNO055IMU;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.util.ReadWriteFile;
import org.firstinspires.ftc.robotcore.external.Func;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.robotcore.external.navigation.AxesOrder;
import org.firstinspires.ftc.robotcore.external.navigation.AxesReference;
import org.firstinspires.ftc.robotcore.external.navigation.Orientation;
import org.firstinspires.ftc.robotcore.internal.system.AppUtil;
import java.io.File;
import java.util.Locale;
/*
* This OpMode calibrates a BNO055 IMU per
* "Section 3.11 Calibration" of the BNO055 specification.
*
* Note: this is a Legacy example that will not work with newer Control/Expansion Hubs that use a different IMU
* Please use the new SensorIMUOrthogonal or SensorIMUNonOrthogonal samples for a more universal IMU interface.
*
* Manual calibration of the IMU is definitely NOT necessary: except for the magnetometer (which is not used by the
* default "IMU" SensorMode), the BNO055 is internally self-calibrating and thus can be very successfully used without
* manual intervention. That said, performing a one-time calibration, saving the results persistently, then loading them
* again at each run can help reduce the time that automatic calibration requires.
*
* This summary of the calibration process from Intel is informative:
* http://iotdk.intel.com/docs/master/upm/classupm_1_1_b_n_o055.html
*
* "This device requires calibration in order to operate accurately. [...] Calibration data is
* lost on a power cycle. See one of the examples for a description of how to calibrate the device,
* but in essence:
*
* There is a calibration status register available [...] that returns the calibration status
* of the accelerometer (ACC), magnetometer (MAG), gyroscope (GYR), and overall system (SYS).
* Each of these values range from 0 (uncalibrated) to 3 (fully calibrated). Calibration [ideally]
* involves certain motions to get all 4 values at 3. The motions are as follows (though see the
* datasheet for more information):
*
* 1. GYR: Simply let the sensor sit flat for a few seconds.
* 2. ACC: Move the sensor in various positions. Start flat, then rotate slowly by 45
* degrees, hold for a few seconds, then continue rotating another 45 degrees and
* hold, etc. 6 or more movements of this type may be required. You can move through
* any axis you desire, but make sure that the device is lying at least once
* perpendicular to the x, y, and z axis.
* 3. MAG: Move slowly in a figure 8 pattern in the air, until the calibration values reaches 3.
* 4. SYS: This will usually reach 3 when the other items have also reached 3. If not, continue
* slowly moving the device though various axes until it does."
*
* To calibrate the IMU, run this sample OpMode with a gamepad attached to the driver station.
* Once the IMU has reached sufficient calibration as reported on telemetry, press the 'A'
* button on the gamepad to write the calibration to a file. That file can then be indicated
* later when running an OpMode which uses the IMU.
*
* Note: if your intended uses of the IMU do not include use of all its sensors (for example,
* you might not use the magnetometer), then it makes little sense for you to wait for full
* calibration of the sensors you are not using before saving the calibration data. Indeed,
* it appears that in a SensorMode that doesn't use the magnetometer (for example), the
* magnetometer cannot actually be calibrated.
*
* References:
* The AdafruitBNO055IMU Javadoc
* The BNO055IMU.Parameters.calibrationDataFile Javadoc
* The BNO055 product page: https://www.bosch-sensortec.com/bst/products/all_products/bno055
* The BNO055 datasheet: https://www.bosch-sensortec.com/media/boschsensortec/downloads/datasheets/bst-bno055-ds000.pdf
*/
@TeleOp(name = "Sensor: BNO055 IMU Calibration", group = "Sensor")
public class SensorBNO055IMUCalibration extends LinearOpMode
{
//----------------------------------------------------------------------------------------------
// State
//----------------------------------------------------------------------------------------------
// Our sensors, motors, and other devices go here, along with other long term state
BNO055IMU imu;
// State used for updating telemetry
Orientation angles;
//----------------------------------------------------------------------------------------------
// Main logic
//----------------------------------------------------------------------------------------------
@Override public void runOpMode() {
telemetry.log().setCapacity(12);
telemetry.log().add("");
telemetry.log().add("Please refer to the calibration instructions");
telemetry.log().add("contained in the Adafruit IMU calibration");
telemetry.log().add("sample OpMode.");
telemetry.log().add("");
telemetry.log().add("When sufficient calibration has been reached,");
telemetry.log().add("press the 'A' button to write the current");
telemetry.log().add("calibration data to a file.");
telemetry.log().add("");
// We are expecting the IMU to be attached to an I2C port on a Core Device Interface Module and named "imu".
BNO055IMU.Parameters parameters = new BNO055IMU.Parameters();
parameters.loggingEnabled = true;
parameters.loggingTag = "IMU";
imu = hardwareMap.get(BNO055IMU.class, "adafruit_imu");
imu.initialize(parameters);
composeTelemetry();
telemetry.log().add("Waiting for start...");
// Wait until we're told to go
while (!isStarted()) {
telemetry.update();
idle();
}
telemetry.log().add("...started...");
while (opModeIsActive()) {
if (gamepad1.a) {
// Get the calibration data
BNO055IMU.CalibrationData calibrationData = imu.readCalibrationData();
// Save the calibration data to a file. You can choose whatever file
// name you wish here, but you'll want to indicate the same file name
// when you initialize the IMU in an OpMode in which it is used. If you
// have more than one IMU on your robot, you'll of course want to use
// different configuration file names for each.
String filename = "AdafruitIMUCalibration.json";
File file = AppUtil.getInstance().getSettingsFile(filename);
ReadWriteFile.writeFile(file, calibrationData.serialize());
telemetry.log().add("saved to '%s'", filename);
// Wait for the button to be released
while (gamepad1.a) {
telemetry.update();
idle();
}
}
telemetry.update();
}
}
void composeTelemetry() {
// At the beginning of each telemetry update, grab a bunch of data
// from the IMU that we will then display in separate lines.
telemetry.addAction(new Runnable() { @Override public void run()
{
// Acquiring the angles is relatively expensive; we don't want
// to do that in each of the three items that need that info, as that's
// three times the necessary expense.
angles = imu.getAngularOrientation(AxesReference.INTRINSIC, AxesOrder.ZYX, AngleUnit.DEGREES);
}
});
telemetry.addLine()
.addData("status", new Func<String>() {
@Override public String value() {
return imu.getSystemStatus().toShortString();
}
})
.addData("calib", new Func<String>() {
@Override public String value() {
return imu.getCalibrationStatus().toString();
}
});
telemetry.addLine()
.addData("heading", new Func<String>() {
@Override public String value() {
return formatAngle(angles.angleUnit, angles.firstAngle);
}
})
.addData("roll", new Func<String>() {
@Override public String value() {
return formatAngle(angles.angleUnit, angles.secondAngle);
}
})
.addData("pitch", new Func<String>() {
@Override public String value() {
return formatAngle(angles.angleUnit, angles.thirdAngle);
}
});
}
//----------------------------------------------------------------------------------------------
// Formatting
//----------------------------------------------------------------------------------------------
String formatAngle(AngleUnit angleUnit, double angle) {
return formatDegrees(AngleUnit.DEGREES.fromUnit(angleUnit, angle));
}
String formatDegrees(double degrees){
return String.format(Locale.getDefault(), "%.1f", AngleUnit.DEGREES.normalize(degrees));
}
}
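The OpMode above writes its results to "AdafruitIMUCalibration.json", and its comments note that the same file name must be given back to the driver when the IMU is used later. A minimal sketch of that load side, assuming the same BNO055 driver and "adafruit_imu" configuration name used in these samples (the class name is hypothetical):

package org.firstinspires.ftc.teamcode.cometbots.projects;

import com.qualcomm.hardware.bosch.BNO055IMU;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;

import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.robotcore.external.navigation.AxesOrder;
import org.firstinspires.ftc.robotcore.external.navigation.AxesReference;
import org.firstinspires.ftc.robotcore.external.navigation.Orientation;

@TeleOp(name = "Sensor: BNO055 IMU (saved calibration)", group = "Sensor")
public class SensorBNO055IMUSavedCalibration extends LinearOpMode {
    @Override
    public void runOpMode() {
        BNO055IMU.Parameters parameters = new BNO055IMU.Parameters();
        parameters.angleUnit = BNO055IMU.AngleUnit.DEGREES;
        parameters.calibrationDataFile = "AdafruitIMUCalibration.json"; // file written by the calibration OpMode
        parameters.loggingEnabled = true;
        parameters.loggingTag = "IMU";

        BNO055IMU imu = hardwareMap.get(BNO055IMU.class, "adafruit_imu");
        imu.initialize(parameters); // the saved calibration is applied during initialization

        waitForStart();
        while (opModeIsActive()) {
            Orientation angles = imu.getAngularOrientation(
                    AxesReference.INTRINSIC, AxesOrder.ZYX, AngleUnit.DEGREES);
            telemetry.addData("heading (deg)", angles.firstAngle);
            telemetry.update();
        }
    }
}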

View File

@ -1,172 +0,0 @@
/* Copyright (c) 2022 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.teamcode.cometbots.projects;
import static org.firstinspires.ftc.teamcode.PedroConstants.BACK_ENCODER;
import static org.firstinspires.ftc.teamcode.PedroConstants.BACK_ENCODER_DIRECTION;
import static org.firstinspires.ftc.teamcode.PedroConstants.IMU_LOGO_FACING_DIRECTION;
import static org.firstinspires.ftc.teamcode.PedroConstants.IMU_USB_FACING_DIRECTION;
import static org.firstinspires.ftc.teamcode.PedroConstants.LEFT_ENCODER;
import static org.firstinspires.ftc.teamcode.PedroConstants.LEFT_ENCODER_DIRECTION;
import static org.firstinspires.ftc.teamcode.PedroConstants.RIGHT_ENCODER;
import static org.firstinspires.ftc.teamcode.PedroConstants.RIGHT_ENCODER_DIRECTION;
import com.qualcomm.hardware.rev.RevHubOrientationOnRobot;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.DcMotorEx;
import com.qualcomm.robotcore.hardware.IMU;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.robotcore.external.navigation.AngularVelocity;
import org.firstinspires.ftc.robotcore.external.navigation.YawPitchRollAngles;
import org.firstinspires.ftc.teamcode.PedroConstants;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Encoder;
/*
* This OpMode shows how to use the new universal IMU interface. This
* interface may be used with the BNO055 IMU or the BHI260 IMU. It assumes that an IMU is configured
* on the robot with the name "imu".
*
* The sample will display the current Yaw, Pitch and Roll of the robot.<br>
* With the correct orientation parameters selected, pitch/roll/yaw should act as follows:
* Pitch value should INCREASE as the robot is tipped UP at the front. (Rotation about X) <br>
* Roll value should INCREASE as the robot is tipped UP at the left side. (Rotation about Y) <br>
* Yaw value should INCREASE as the robot is rotated Counter Clockwise. (Rotation about Z) <br>
*
* The yaw can be reset (to zero) by pressing the Y button on the gamepad (Triangle on a PS4 controller)
*
* This specific sample assumes that the Hub is mounted on one of the three orthogonal planes
* (X/Y, X/Z or Y/Z) and that the Hub has only been rotated in a range of 90 degree increments.
*
* Note: if your Hub is mounted on a surface angled at some non-90 Degree multiple (like 30) look at
* the alternative SensorIMUNonOrthogonal sample in this folder.
*
* This "Orthogonal" requirement means that:
*
* 1) The Logo printed on the top of the Hub can ONLY be pointing in one of six directions:
* FORWARD, BACKWARD, UP, DOWN, LEFT and RIGHT.
*
* 2) The USB ports can only be pointing in one of the same six directions:<br>
* FORWARD, BACKWARD, UP, DOWN, LEFT and RIGHT.
*
* So, to fully define how your Hub is mounted to the robot, you must simply specify:<br>
* logoFacingDirection<br>
* usbFacingDirection
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
*
* Finally, choose the two correct parameters to define how your Hub is mounted and edit this OpMode
* to use those parameters.
*/
@TeleOp(name = "Sensor: IMU Orthogonal", group = "Sensor")
@Disabled // Comment this out to add to the OpMode list
public class SensorIMUOrthogonal extends LinearOpMode {
// The IMU sensor object
IMU imu;
private Encoder leftEncoder;
private Encoder rightEncoder;
private Encoder strafeEncoder;
//----------------------------------------------------------------------------------------------
// Main logic
//----------------------------------------------------------------------------------------------
@Override
public void runOpMode() throws InterruptedException {
// Retrieve and initialize the IMU.
// This sample expects the IMU to be in a REV Hub and named "imu".
imu = hardwareMap.get(IMU.class, PedroConstants.IMU);
// TODO: replace these with your encoder ports
leftEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, LEFT_ENCODER));
rightEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, RIGHT_ENCODER));
strafeEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, BACK_ENCODER));
// TODO: reverse any encoders necessary
leftEncoder.setDirection(LEFT_ENCODER_DIRECTION);
rightEncoder.setDirection(RIGHT_ENCODER_DIRECTION);
strafeEncoder.setDirection(BACK_ENCODER_DIRECTION);
/* Define how the hub is mounted on the robot to get the correct Yaw, Pitch and Roll values.
*
* Two input parameters are required to fully specify the Orientation.
* The first parameter specifies the direction the printed logo on the Hub is pointing.
* The second parameter specifies the direction the USB connector on the Hub is pointing.
* All directions are relative to the robot, and left/right is as-viewed from behind the robot.
*
* If you are using a REV 9-Axis IMU, you can use the Rev9AxisImuOrientationOnRobot class instead of the
* RevHubOrientationOnRobot class, which has an I2cPortFacingDirection instead of a UsbFacingDirection.
*/
/* The next two lines define Hub orientation.
* The Default Orientation (shown) is when a hub is mounted horizontally with the printed logo pointing UP and the USB port pointing FORWARD.
*
* To Do: EDIT these two lines to match YOUR mounting configuration.
*/
RevHubOrientationOnRobot.LogoFacingDirection logoDirection = IMU_LOGO_FACING_DIRECTION;
RevHubOrientationOnRobot.UsbFacingDirection usbDirection = IMU_USB_FACING_DIRECTION;
RevHubOrientationOnRobot orientationOnRobot = new RevHubOrientationOnRobot(logoDirection, usbDirection);
// Now initialize the IMU with this mounting orientation
// Note: if you choose two conflicting directions, this initialization will cause a code exception.
imu.initialize(new IMU.Parameters(orientationOnRobot));
// Loop and update the dashboard
while (!isStopRequested()) {
telemetry.addData("Hub orientation", "Logo=%s USB=%s\n ", logoDirection, usbDirection);
// Check to see if heading reset is requested
if (gamepad1.y) {
telemetry.addData("Yaw", "Resetting\n");
imu.resetYaw();
} else {
telemetry.addData("Yaw", "Press Y (triangle) on Gamepad to reset\n");
}
// Retrieve Rotational Angles and Velocities
YawPitchRollAngles orientation = imu.getRobotYawPitchRollAngles();
AngularVelocity angularVelocity = imu.getRobotAngularVelocity(AngleUnit.DEGREES);
telemetry.addData("Yaw (Z)", "%.2f Deg. (Heading)", orientation.getYaw(AngleUnit.DEGREES));
telemetry.addData("Pitch (X)", "%.2f Deg.", orientation.getPitch(AngleUnit.DEGREES));
telemetry.addData("Roll (Y)", "%.2f Deg.\n", orientation.getRoll(AngleUnit.DEGREES));
telemetry.addData("Yaw (Z) velocity", "%.2f Deg/Sec", angularVelocity.zRotationRate);
telemetry.addData("Pitch (X) velocity", "%.2f Deg/Sec", angularVelocity.xRotationRate);
telemetry.addData("Roll (Y) velocity", "%.2f Deg/Sec", angularVelocity.yRotationRate);
telemetry.update();
}
}
}
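SensorIMUOrthogonal above wires up the three odometry encoders from PedroConstants but never displays them, so on its own it cannot confirm the LEFT/RIGHT/BACK encoder names and directions. A hedged addition for that purpose, using only the Encoder methods (update, getDeltaPosition) that the localizers in this changeset already rely on; the helper name is hypothetical and it would be called from inside the telemetry loop:

    // Hypothetical helper for the class above: call once per loop iteration.
    // Push the robot forward: the left and right deltas should both be positive.
    // Strafe the robot: the strafe delta should change sign with direction.
    // If a sign is wrong, flip the matching *_ENCODER_DIRECTION constant in PedroConstants.
    private void addEncoderTelemetry() {
        leftEncoder.update();
        rightEncoder.update();
        strafeEncoder.update();
        telemetry.addData("left encoder delta", leftEncoder.getDeltaPosition());
        telemetry.addData("right encoder delta", rightEncoder.getDeltaPosition());
        telemetry.addData("strafe encoder delta", strafeEncoder.getDeltaPosition());
    }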

View File

@ -1,237 +0,0 @@
package org.firstinspires.ftc.teamcode.cometbots.projects;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import org.firstinspires.ftc.robotcore.external.Telemetry;
import org.firstinspires.ftc.teamcode.pedroPathing.follower.Follower;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierLine;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathBuilder;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathChain;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Point;
@Autonomous(name = "org.firstinspires.ftc.teamcode.cometbots.projects.bBlueAutoV1", group = "V1")
public class bBlueAutoV1 extends OpMode {
public Follower robot;
public PathChain path;
@Override
public void init() {
robot = new Follower(hardwareMap);
PathBuilder builder = new PathBuilder();
path = builder
.addPath(
// Line 1
new BezierLine(
new Point(9.757, 84.983, Point.CARTESIAN),
new Point(28.573, 76.302, Point.CARTESIAN)
)
)
.addPath(
// Line 2
new BezierLine(
new Point(28.573, 76.302, Point.CARTESIAN),
new Point(36.203, 76.140, Point.CARTESIAN)
)
)
.addPath(
// Line 3
new BezierLine(
new Point(36.203, 76.140, Point.CARTESIAN),
new Point(35.067, 35.716, Point.CARTESIAN)
)
)
.addPath(
// Line 4
new BezierLine(
new Point(35.067, 35.716, Point.CARTESIAN),
new Point(73.705, 34.742, Point.CARTESIAN)
)
)
.addPath(
// Line 5
new BezierLine(
new Point(73.705, 34.742, Point.CARTESIAN),
new Point(73.705, 24.839, Point.CARTESIAN)
)
)
.addPath(
// Line 6
new BezierLine(
new Point(73.705, 24.839, Point.CARTESIAN),
new Point(7.630, 26.462, Point.CARTESIAN)
)
)
.addPath(
// Line 7
new BezierLine(
new Point(7.630, 26.462, Point.CARTESIAN),
new Point(64.126, 22.728, Point.CARTESIAN)
)
)
.addPath(
// Line 8
new BezierLine(
new Point(64.126, 22.728, Point.CARTESIAN),
new Point(63.964, 13.150, Point.CARTESIAN)
)
)
.addPath(
// Line 9
new BezierLine(
new Point(63.964, 13.150, Point.CARTESIAN),
new Point(12.338, 15.260, Point.CARTESIAN)
)
)
.addPath(
// Line 10
new BezierLine(
new Point(12.338, 15.260, Point.CARTESIAN),
new Point(63.802, 13.150, Point.CARTESIAN)
)
)
.addPath(
// Line 11
new BezierLine(
new Point(63.802, 13.150, Point.CARTESIAN),
new Point(63.639, 11.689, Point.CARTESIAN)
)
)
.addPath(
// Line 12
new BezierLine(
new Point(63.639, 11.689, Point.CARTESIAN),
new Point(12.014, 11.689, Point.CARTESIAN)
)
)
.addPath(
// Line 13
new BezierLine(
new Point(12.014, 11.689, Point.CARTESIAN),
new Point(62.665, 30.196, Point.CARTESIAN)
)
)
.addPath(
// Line 14
new BezierLine(
new Point(62.665, 30.196, Point.CARTESIAN),
new Point(13.312, 51.463, Point.CARTESIAN)
)
)
.addPath(
// Line 15
new BezierLine(
new Point(13.312, 51.463, Point.CARTESIAN),
new Point(16.234, 103.738, Point.CARTESIAN)
)
)
.addPath(
// Line 16
new BezierLine(
new Point(16.234, 103.738, Point.CARTESIAN),
new Point(68.023, 108.284, Point.CARTESIAN)
)
)
.addPath(
// Line 17
new BezierLine(
new Point(68.023, 108.284, Point.CARTESIAN),
new Point(68.185, 121.109, Point.CARTESIAN)
)
)
.addPath(
// Line 18
new BezierLine(
new Point(68.185, 121.109, Point.CARTESIAN),
new Point(21.754, 119.811, Point.CARTESIAN)
)
)
.addPath(
// Line 19
new BezierLine(
new Point(21.754, 119.811, Point.CARTESIAN),
new Point(11.526, 129.227, Point.CARTESIAN)
)
)
.addPath(
// Line 20
new BezierLine(
new Point(11.526, 129.227, Point.CARTESIAN),
new Point(72.568, 111.856, Point.CARTESIAN)
)
)
.addPath(
// Line 21
new BezierLine(
new Point(72.568, 111.856, Point.CARTESIAN),
new Point(58.607, 128.902, Point.CARTESIAN)
)
)
.addPath(
// Line 22
new BezierLine(
new Point(58.607, 128.902, Point.CARTESIAN),
new Point(11.364, 130.850, Point.CARTESIAN)
)
)
.addPath(
// Line 23
new BezierLine(
new Point(11.364, 130.850, Point.CARTESIAN),
new Point(58.931, 128.577, Point.CARTESIAN)
)
)
.addPath(
// Line 24
new BezierLine(
new Point(58.931, 128.577, Point.CARTESIAN),
new Point(58.769, 133.123, Point.CARTESIAN)
)
)
.addPath(
// Line 25
new BezierLine(
new Point(58.769, 133.123, Point.CARTESIAN),
new Point(13.475, 133.935, Point.CARTESIAN)
)
).build();
}
@Override
public void loop() {
robot.update();
if (robot.atParametricEnd())
robot.followPath(path);
robot.telemetryDebug(telemetry);
}
}

View File

@ -1,174 +0,0 @@
package org.firstinspires.ftc.teamcode.cometbots.projects;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierLine;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathBuilder;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Point;
public class bRedAutoV1 {
public bRedAutoV1() {
PathBuilder builder = new PathBuilder();
builder
.addPath(
// Line 1
new BezierLine(
new Point(133.935, 83.770, Point.CARTESIAN),
new Point(79.874, 117.213, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 2
new BezierLine(
new Point(79.874, 117.213, Point.CARTESIAN),
new Point(79.874, 120.785, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 3
new BezierLine(
new Point(79.874, 120.785, Point.CARTESIAN),
new Point(131.824, 118.349, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 4
new BezierLine(
new Point(131.824, 118.349, Point.CARTESIAN),
new Point(79.549, 120.460, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 5
new BezierLine(
new Point(79.549, 120.460, Point.CARTESIAN),
new Point(79.549, 128.740, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 6
new BezierLine(
new Point(79.549, 128.740, Point.CARTESIAN),
new Point(131.337, 128.090, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 7
new BezierLine(
new Point(131.337, 128.090, Point.CARTESIAN),
new Point(79.549, 128.740, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 8
new BezierLine(
new Point(79.549, 128.740, Point.CARTESIAN),
new Point(79.549, 133.610, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 9
new BezierLine(
new Point(79.549, 133.610, Point.CARTESIAN),
new Point(130.850, 133.285, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 10
new BezierLine(
new Point(130.850, 133.285, Point.CARTESIAN),
new Point(130.201, 36.528, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 11
new BezierLine(
new Point(130.201, 36.528, Point.CARTESIAN),
new Point(84.095, 36.203, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 12
new BezierLine(
new Point(84.095, 36.203, Point.CARTESIAN),
new Point(84.095, 23.378, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 13
new BezierLine(
new Point(84.095, 23.378, Point.CARTESIAN),
new Point(119.811, 23.378, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 14
new BezierLine(
new Point(119.811, 23.378, Point.CARTESIAN),
new Point(127.603, 13.475, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 15
new BezierLine(
new Point(127.603, 13.475, Point.CARTESIAN),
new Point(88.640, 28.248, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 16
new BezierLine(
new Point(88.640, 28.248, Point.CARTESIAN),
new Point(87.666, 15.910, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 17
new BezierLine(
new Point(87.666, 15.910, Point.CARTESIAN),
new Point(127.603, 12.014, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 18
new BezierLine(
new Point(127.603, 12.014, Point.CARTESIAN),
new Point(86.692, 12.825, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 19
new BezierLine(
new Point(86.692, 12.825, Point.CARTESIAN),
new Point(86.692, 10.390, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation()
.addPath(
// Line 20
new BezierLine(
new Point(86.692, 10.390, Point.CARTESIAN),
new Point(126.467, 9.903, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation();
}
}

View File

@ -0,0 +1,72 @@
package org.firstinspires.ftc.teamcode.cometbots.tests;
import static org.firstinspires.ftc.teamcode.PedroConstants.LIFT_SLIDE_LEFT_MOTOR;
import static org.firstinspires.ftc.teamcode.PedroConstants.LIFT_SLIDE_RIGHT_MOTOR;
import com.acmerobotics.dashboard.FtcDashboard;
import com.acmerobotics.dashboard.config.Config;
import com.acmerobotics.dashboard.telemetry.TelemetryPacket;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.hardware.DcMotorEx;
import com.qualcomm.robotcore.util.ElapsedTime;
@Config
@Autonomous(name = "Lift Motor Subsystem - PID Test")
public class DualMotorSliderTest extends LinearOpMode {
private DcMotorEx liftSlideLeft;
private DcMotorEx liftSlideRight;
public static double kp = 0.0015, ki = 0, kd = 0;
private double lastError = 0;
private double integralSum = 0;
public static int targetPosition = 0;
private final FtcDashboard dashboard = FtcDashboard.getInstance();
private ElapsedTime timer = new ElapsedTime();
@Override
public void runOpMode() throws InterruptedException {
TelemetryPacket packet = new TelemetryPacket();
dashboard.setTelemetryTransmissionInterval(25);
liftSlideLeft = hardwareMap.get(DcMotorEx.class, LIFT_SLIDE_LEFT_MOTOR);
liftSlideRight = hardwareMap.get(DcMotorEx.class, LIFT_SLIDE_RIGHT_MOTOR);
liftSlideLeft.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
liftSlideLeft.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);
liftSlideLeft.setMode(DcMotor.RunMode.RUN_WITHOUT_ENCODER);
liftSlideRight.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
liftSlideRight.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);
liftSlideRight.setMode(DcMotor.RunMode.RUN_WITHOUT_ENCODER);
waitForStart();
while(opModeIsActive()) {
double power = calculatePower(targetPosition, liftSlideLeft.getCurrentPosition());
packet.put("Power", power);
packet.put("Position", liftSlideLeft.getCurrentPosition());
packet.put("Error", lastError);
packet.put("Seconds", timer.seconds());
liftSlideLeft.setPower(power);
liftSlideRight.setPower(power);
dashboard.sendTelemetryPacket(packet);
}
}
private double calculatePower(int targetPosition, int currentPosition) {
// reference is targetPosition, state is currentPosition
double error = targetPosition - currentPosition;
integralSum += error * timer.seconds();
double derivative = (error - lastError) / timer.seconds();
lastError = error;
timer.reset();
return (error * kp) + (derivative * kd) + (integralSum * ki);
}
}
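calculatePower() above is a standard PID step: a proportional term error * kp, an integral term integralSum * ki, and a finite-difference derivative term derivative * kd, with the timer supplying dt for the integral and derivative. One practical issue once ki is tuned away from zero is integral wind-up while the slides are stalled against a hard stop. A hedged variant of the same step with a clamped integral (the method name, MAX_INTEGRAL value, and the dt floor are assumptions, not part of the team's code):

    private static final double MAX_INTEGRAL = 1000; // assumed limit; tune together with ki

    private double calculatePowerClamped(int targetPosition, int currentPosition) {
        double error = targetPosition - currentPosition;
        double dt = Math.max(timer.seconds(), 1e-3); // floor dt to avoid dividing by ~0 on very fast loops
        integralSum += error * dt;
        // Clamp the accumulated error so a long stall cannot wind the integral term up.
        integralSum = Math.max(-MAX_INTEGRAL, Math.min(MAX_INTEGRAL, integralSum));
        double derivative = (error - lastError) / dt;
        lastError = error;
        timer.reset();
        return (error * kp) + (derivative * kd) + (integralSum * ki);
    }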

View File

@ -5,9 +5,7 @@ import com.qualcomm.robotcore.hardware.HardwareMap;
import com.qualcomm.robotcore.hardware.IMU; import com.qualcomm.robotcore.hardware.IMU;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit; import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.localizers.ThreeWheelIMULocalizer;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.localizers.ThreeWheelLocalizer; import org.firstinspires.ftc.teamcode.pedroPathing.localization.localizers.ThreeWheelLocalizer;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.localizers.TwoWheelLocalizer;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.MathFunctions; import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.MathFunctions;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector; import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector;

View File

@ -1,272 +0,0 @@
package org.firstinspires.ftc.teamcode.pedroPathing.localization.localizers;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftFrontMotorName;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftRearMotorName;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightFrontMotorName;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightRearMotorName;
import com.acmerobotics.dashboard.config.Config;
import com.qualcomm.robotcore.hardware.DcMotorEx;
import com.qualcomm.robotcore.hardware.HardwareMap;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Encoder;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Localizer;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Matrix;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.MathFunctions;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector;
import org.firstinspires.ftc.teamcode.pedroPathing.util.NanoTimer;
/**
* This is the DriveEncoderLocalizer class. This class extends the Localizer superclass and is a
* localizer that uses the drive encoder set up.
*
* @author Anyi Lin - 10158 Scott's Bots
* @version 1.0, 4/2/2024
*/
@Config
public class DriveEncoderLocalizer extends Localizer {
private HardwareMap hardwareMap;
private Pose startPose;
private Pose displacementPose;
private Pose currentVelocity;
private Matrix prevRotationMatrix;
private NanoTimer timer;
private long deltaTimeNano;
private Encoder leftFront;
private Encoder rightFront;
private Encoder leftRear;
private Encoder rightRear;
private double totalHeading;
public static double FORWARD_TICKS_TO_INCHES = 1;
public static double STRAFE_TICKS_TO_INCHES = 1;
public static double TURN_TICKS_TO_RADIANS = 1;
public static double ROBOT_WIDTH = 1;
public static double ROBOT_LENGTH = 1;
/**
* This creates a new DriveEncoderLocalizer from a HardwareMap, with a starting Pose at (0,0)
* facing 0 heading.
*
* @param map the HardwareMap
*/
public DriveEncoderLocalizer(HardwareMap map) {
this(map, new Pose());
}
/**
* This creates a new DriveEncoderLocalizer from a HardwareMap and a Pose, with the Pose
* specifying the starting pose of the localizer.
*
* @param map the HardwareMap
* @param setStartPose the Pose to start from
*/
public DriveEncoderLocalizer(HardwareMap map, Pose setStartPose) {
hardwareMap = map;
leftFront = new Encoder(hardwareMap.get(DcMotorEx.class, leftFrontMotorName));
leftRear = new Encoder(hardwareMap.get(DcMotorEx.class, leftRearMotorName));
rightRear = new Encoder(hardwareMap.get(DcMotorEx.class, rightRearMotorName));
rightFront = new Encoder(hardwareMap.get(DcMotorEx.class, rightFrontMotorName));
// TODO: reverse any encoders necessary
leftFront.setDirection(Encoder.REVERSE);
rightRear.setDirection(Encoder.REVERSE);
leftRear.setDirection(Encoder.FORWARD);
rightFront.setDirection(Encoder.FORWARD);
setStartPose(setStartPose);
timer = new NanoTimer();
deltaTimeNano = 1;
displacementPose = new Pose();
currentVelocity = new Pose();
}
/**
* This returns the current pose estimate.
*
* @return returns the current pose estimate as a Pose
*/
@Override
public Pose getPose() {
return MathFunctions.addPoses(startPose, displacementPose);
}
/**
* This returns the current velocity estimate.
*
* @return returns the current velocity estimate as a Pose
*/
@Override
public Pose getVelocity() {
return currentVelocity.copy();
}
/**
* This returns the current velocity estimate.
*
* @return returns the current velocity estimate as a Vector
*/
@Override
public Vector getVelocityVector() {
return currentVelocity.getVector();
}
/**
* This sets the start pose. Changing the start pose should move the robot as if all its
* previous movements were displacing it from its new start pose.
*
* @param setStart the new start pose
*/
@Override
public void setStartPose(Pose setStart) {
startPose = setStart;
}
/**
* This sets the Matrix that contains the previous pose's heading rotation.
*
* @param heading the rotation of the Matrix
*/
public void setPrevRotationMatrix(double heading) {
prevRotationMatrix = new Matrix(3,3);
prevRotationMatrix.set(0, 0, Math.cos(heading));
prevRotationMatrix.set(0, 1, -Math.sin(heading));
prevRotationMatrix.set(1, 0, Math.sin(heading));
prevRotationMatrix.set(1, 1, Math.cos(heading));
prevRotationMatrix.set(2, 2, 1.0);
}
/**
* This sets the current pose estimate. Changing this should just change the robot's current
* pose estimate, not anything to do with the start pose.
*
* @param setPose the new current pose estimate
*/
@Override
public void setPose(Pose setPose) {
displacementPose = MathFunctions.subtractPoses(setPose, startPose);
resetEncoders();
}
/**
* This updates the elapsed time timer that keeps track of time between updates, as well as the
* change position of the Encoders. Then, the robot's global change in position is calculated
* using the pose exponential method.
*/
@Override
public void update() {
deltaTimeNano = timer.getElapsedTime();
timer.resetTimer();
updateEncoders();
Matrix robotDeltas = getRobotDeltas();
Matrix globalDeltas;
setPrevRotationMatrix(getPose().getHeading());
Matrix transformation = new Matrix(3,3);
if (Math.abs(robotDeltas.get(2, 0)) < 0.001) {
transformation.set(0, 0, 1.0 - (Math.pow(robotDeltas.get(2, 0), 2) / 6.0));
transformation.set(0, 1, -robotDeltas.get(2, 0) / 2.0);
transformation.set(1, 0, robotDeltas.get(2, 0) / 2.0);
transformation.set(1, 1, 1.0 - (Math.pow(robotDeltas.get(2, 0), 2) / 6.0));
transformation.set(2, 2, 1.0);
} else {
transformation.set(0, 0, Math.sin(robotDeltas.get(2, 0)) / robotDeltas.get(2, 0));
transformation.set(0, 1, (Math.cos(robotDeltas.get(2, 0)) - 1.0) / robotDeltas.get(2, 0));
transformation.set(1, 0, (1.0 - Math.cos(robotDeltas.get(2, 0))) / robotDeltas.get(2, 0));
transformation.set(1, 1, Math.sin(robotDeltas.get(2, 0)) / robotDeltas.get(2, 0));
transformation.set(2, 2, 1.0);
}
globalDeltas = Matrix.multiply(Matrix.multiply(prevRotationMatrix, transformation), robotDeltas);
displacementPose.add(new Pose(globalDeltas.get(0, 0), globalDeltas.get(1, 0), globalDeltas.get(2, 0)));
currentVelocity = new Pose(globalDeltas.get(0, 0) / (deltaTimeNano / Math.pow(10.0, 9)), globalDeltas.get(1, 0) / (deltaTimeNano / Math.pow(10.0, 9)), globalDeltas.get(2, 0) / (deltaTimeNano / Math.pow(10.0, 9))); // deltaTimeNano / 1e9 converts the elapsed nanoseconds to seconds
totalHeading += globalDeltas.get(2, 0);
}
/**
* This updates the Encoders.
*/
public void updateEncoders() {
leftFront.update();
rightFront.update();
leftRear.update();
rightRear.update();
}
/**
* This resets the Encoders.
*/
public void resetEncoders() {
leftFront.reset();
rightFront.reset();
leftRear.reset();
rightRear.reset();
}
/**
* This calculates the change in position from the perspective of the robot using information
* from the Encoders.
*
* @return returns a Matrix containing the robot relative movement.
*/
public Matrix getRobotDeltas() {
Matrix returnMatrix = new Matrix(3,1);
// x/forward movement
returnMatrix.set(0,0, FORWARD_TICKS_TO_INCHES * (leftFront.getDeltaPosition() + rightFront.getDeltaPosition() + leftRear.getDeltaPosition() + rightRear.getDeltaPosition()));
//y/strafe movement
returnMatrix.set(1,0, STRAFE_TICKS_TO_INCHES * (-leftFront.getDeltaPosition() + rightFront.getDeltaPosition() + leftRear.getDeltaPosition() - rightRear.getDeltaPosition()));
// theta/turning
returnMatrix.set(2,0, TURN_TICKS_TO_RADIANS * ((-leftFront.getDeltaPosition() + rightFront.getDeltaPosition() - leftRear.getDeltaPosition() + rightRear.getDeltaPosition()) / (ROBOT_WIDTH + ROBOT_LENGTH)));
return returnMatrix;
}
/**
* This returns how far the robot has turned in radians, in a number not clamped between 0 and
* 2 * pi radians. This is used by some tuning routines; nothing within the follower itself relies on it.
*
* @return returns how far the robot has turned in total, in radians.
*/
public double getTotalHeading() {
return totalHeading;
}
/**
* This returns the multiplier applied to forward movement measurement to convert from encoder
* ticks to inches. This is found empirically through a tuner.
*
* @return returns the forward ticks to inches multiplier
*/
public double getForwardMultiplier() {
return FORWARD_TICKS_TO_INCHES;
}
/**
* This returns the multiplier applied to lateral/strafe movement measurement to convert from
* encoder ticks to inches. This is found empirically through a tuner.
*
* @return returns the lateral/strafe ticks to inches multiplier
*/
public double getLateralMultiplier() {
return STRAFE_TICKS_TO_INCHES;
}
/**
* This returns the multiplier applied to turning movement measurement to convert from encoder
* ticks to radians. This is found empirically through a tuner.
*
* @return returns the turning ticks to radians multiplier
*/
public double getTurningMultiplier() {
return TURN_TICKS_TO_RADIANS;
}
/**
* This does nothing since this localizer does not use the IMU.
*/
public void resetIMU() {
}
}
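For reference, the "pose exponential" that update() above implements can be written out; this only restates what the transformation Matrix already encodes:

\[
\Delta_{\text{global}} \;=\; R(\theta_{\text{prev}})\, T(\Delta\theta)\, \Delta_{\text{robot}},
\qquad
T(\Delta\theta) \;=\;
\begin{pmatrix}
\frac{\sin\Delta\theta}{\Delta\theta} & \frac{\cos\Delta\theta - 1}{\Delta\theta} & 0 \\
\frac{1-\cos\Delta\theta}{\Delta\theta} & \frac{\sin\Delta\theta}{\Delta\theta} & 0 \\
0 & 0 & 1
\end{pmatrix}
\]

where Delta_robot is the (forward, strafe, heading) vector from getRobotDeltas(), R(theta_prev) is prevRotationMatrix, and for |Delta theta| < 0.001 the code swaps in the Taylor limits sin(Delta theta)/Delta theta ≈ 1 − Delta theta²/6 and (1 − cos(Delta theta))/Delta theta ≈ Delta theta/2 to avoid dividing by a near-zero heading change.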

View File

@ -1,218 +0,0 @@
package org.firstinspires.ftc.teamcode.pedroPathing.localization.localizers;
import com.qualcomm.hardware.sparkfun.SparkFunOTOS;
import com.qualcomm.robotcore.hardware.HardwareMap;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.robotcore.external.navigation.DistanceUnit;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Localizer;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.MathFunctions;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector;
/**
* This is the OTOSLocalizer class. This class extends the Localizer superclass and is a
* localizer that uses the SparkFun OTOS. The diagram below, which is modified from
* Road Runner, shows a typical set up.
*
* The view is from the top of the robot looking downwards.
*
* left on robot is the y positive direction
*
* forward on robot is the x positive direction
*
* /--------------\
* | ____ |
* | ---- |
* | || || |
* | || || | ----> left (y positive)
* | |
* | |
* \--------------/
* |
* |
* V
* forward (x positive)
*
* @author Anyi Lin - 10158 Scott's Bots
* @version 1.0, 7/20/2024
*/
public class OTOSLocalizer extends Localizer {
private HardwareMap hardwareMap;
private Pose startPose;
private SparkFunOTOS otos;
private double previousHeading;
private double totalHeading;
/**
* This creates a new OTOSLocalizer from a HardwareMap, with a starting Pose at (0,0)
* facing 0 heading.
*
* @param map the HardwareMap
*/
public OTOSLocalizer(HardwareMap map) {
this(map, new Pose());
}
/**
* This creates a new OTOSLocalizer from a HardwareMap and a Pose, with the Pose
* specifying the starting pose of the localizer.
*
* @param map the HardwareMap
* @param setStartPose the Pose to start from
*/
public OTOSLocalizer(HardwareMap map, Pose setStartPose) {
hardwareMap = map;
// TODO: replace this with your OTOS port
/*
TODO: If you want to use the "SparkFunOTOSCorrected" version of OTOS, then replace the
'SparkFunOTOS.class' below with 'SparkFunOTOSCorrected.class' and set the OTOS as a
"SparkFunOTOS Corrected" in your robot config
*/
otos = hardwareMap.get(SparkFunOTOS.class, "sensor_otos");
otos.setLinearUnit(DistanceUnit.INCH);
otos.setAngularUnit(AngleUnit.RADIANS);
// TODO: replace this with your OTOS offset from the center of the robot
// For the OTOS, left/right is the y axis and forward/backward is the x axis, with left being
// positive y and forward being positive x. PI/2 radians is facing forward, and clockwise
// rotation is negative rotation.
otos.setOffset(new SparkFunOTOS.Pose2D(0,0,Math.PI / 2));
// TODO: replace these with your tuned multipliers
otos.setLinearScalar(1.0);
otos.setAngularScalar(1.0);
otos.calibrateImu();
otos.resetTracking();
setStartPose(setStartPose);
totalHeading = 0;
previousHeading = startPose.getHeading();
resetOTOS();
}
/**
* This returns the current pose estimate.
*
* @return returns the current pose estimate as a Pose
*/
@Override
public Pose getPose() {
SparkFunOTOS.Pose2D pose = otos.getPosition();
return MathFunctions.addPoses(startPose, new Pose(pose.x, pose.y, pose.h));
}
/**
* This returns the current velocity estimate.
*
* @return returns the current velocity estimate as a Pose
*/
@Override
public Pose getVelocity() {
SparkFunOTOS.Pose2D OTOSVelocity = otos.getVelocity();
return new Pose(OTOSVelocity.x, OTOSVelocity.y, OTOSVelocity.h);
}
/**
* This returns the current velocity estimate.
*
* @return returns the current velocity estimate as a Vector
*/
@Override
public Vector getVelocityVector() {
return getVelocity().getVector();
}
/**
* This sets the start pose. Changing the start pose should move the robot as if all its
* previous movements were displacing it from its new start pose.
*
* @param setStart the new start pose
*/
@Override
public void setStartPose(Pose setStart) {
startPose = setStart;
}
/**
* This sets the current pose estimate. Changing this should just change the robot's current
* pose estimate, not anything to do with the start pose.
*
* @param setPose the new current pose estimate
*/
@Override
public void setPose(Pose setPose) {
resetOTOS();
Pose setOTOSPose = MathFunctions.subtractPoses(setPose, startPose);
otos.setPosition(new SparkFunOTOS.Pose2D(setOTOSPose.getX(), setOTOSPose.getY(), setOTOSPose.getHeading()));
}
/**
* This updates the total heading of the robot. The OTOS handles all other updates itself.
*/
@Override
public void update() {
totalHeading += MathFunctions.getSmallestAngleDifference(otos.getPosition().h, previousHeading);
previousHeading = otos.getPosition().h;
}
/**
* This resets the OTOS.
*/
public void resetOTOS() {
otos.resetTracking();
}
/**
* This returns how far the robot has turned in radians, in a number not clamped between 0 and
* 2 * pi radians. This is used by some tuning routines; nothing within the follower itself relies on it.
*
* @return returns how far the robot has turned in total, in radians.
*/
public double getTotalHeading() {
return totalHeading;
}
/**
* This returns the multiplier applied to forward movement measurement to convert from OTOS
* ticks to inches. For the OTOS, this value is the same as the lateral multiplier.
* This is found empirically through a tuner.
*
* @return returns the forward ticks to inches multiplier
*/
public double getForwardMultiplier() {
return otos.getLinearScalar();
}
/**
* This returns the multiplier applied to lateral/strafe movement measurement to convert from
* OTOS ticks to inches. For the OTOS, this value is the same as the forward multiplier.
* This is found empirically through a tuner.
*
* @return returns the lateral/strafe ticks to inches multiplier
*/
public double getLateralMultiplier() {
return otos.getLinearScalar();
}
/**
* This returns the multiplier applied to turning movement measurement to convert from OTOS ticks
* to radians. This is found empirically through a tuner.
*
* @return returns the turning ticks to radians multiplier
*/
public double getTurningMultiplier() {
return otos.getAngularScalar();
}
/**
* This does nothing since this localizer does not use the IMU.
*/
public void resetIMU() {
}
}
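setOffset() above uses (0, 0, Math.PI / 2), i.e. a sensor at the robot's center under the axis convention described in the comments (forward = +x, left = +y, PI/2 = facing forward). As a hedged example with made-up numbers: for an OTOS mounted 3 inches forward and 5 inches to the left of the robot's center, still facing forward, the call would become:

        // Hypothetical mounting: 3 in forward (+x), 5 in left (+y), still facing forward (PI/2).
        otos.setOffset(new SparkFunOTOS.Pose2D(3, 5, Math.PI / 2));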

View File

@ -1,159 +0,0 @@
//package org.firstinspires.ftc.teamcode.pedroPathing.localization;
//
//import com.acmerobotics.roadrunner.geometry.Pose2d;
//import com.qualcomm.robotcore.hardware.HardwareMap;
//
//import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.MathFunctions;
//import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector;
//
//import java.util.ArrayList;
//import java.util.List;
//
///**
// * This is the RRToPedroThreeWheelLocalizer class. This class extends the Localizer superclass and
// * is intended to adapt the old Road Runner three wheel odometry localizer to the new Pedro Pathing
// * localizer system.
// *
// * @author Anyi Lin - 10158 Scott's Bots
// * @version 1.0, 5/9/2024
// */
//public class RRToPedroThreeWheelLocalizer extends Localizer {
// private RoadRunnerThreeWheelLocalizer localizer;
// private double totalHeading;
// private Pose startPose;
// private Pose previousPose;
//
// /**
// * This creates a new RRToPedroThreeWheelLocalizer from a HardwareMap. This adapts the previously
// * used Road Runner localization system to the new Pedro Pathing localization system.
// *
// * @param hardwareMap the HardwareMap
// */
// public RRToPedroThreeWheelLocalizer(HardwareMap hardwareMap) {
// List<Integer> lastTrackingEncPositions = new ArrayList<>();
// List<Integer> lastTrackingEncVels = new ArrayList<>();
//
// localizer = new RoadRunnerThreeWheelLocalizer(hardwareMap, lastTrackingEncPositions, lastTrackingEncVels);
//
// startPose = new Pose();
// previousPose = new Pose();
// }
//
// /**
// * This returns the current pose estimate as a Pose.
// *
// * @return returns the current pose estimate
// */
// @Override
// public Pose getPose() {
// Pose2d pose = localizer.getPoseEstimate();
// return new Pose(pose.getX(), pose.getY(), pose.getHeading());
// }
//
// /**
// * This returns the current velocity estimate as a Pose.
// *
// * @return returns the current velocity estimate
// */
// @Override
// public Pose getVelocity() {
// Pose2d pose = localizer.getPoseVelocity();
// return new Pose(pose.getX(), pose.getY(), pose.getHeading());
// }
//
// /**
// * This returns the current velocity estimate as a Vector.
// *
// * @return returns the current velocity estimate
// */
// @Override
// public Vector getVelocityVector() {
// Pose2d pose = localizer.getPoseVelocity();
// Vector returnVector = new Vector();
// returnVector.setOrthogonalComponents(pose.getX(), pose.getY());
// return returnVector;
// }
//
// /**
// * This sets the start pose. Any movement of the robot is treated as a displacement from the
// * start pose, so moving the start pose will move the current pose estimate the same amount.
// *
// * @param setStart the new start pose
// */
// @Override
// public void setStartPose(Pose setStart) {
// Pose oldStart = startPose;
// startPose = setStart;
// Pose startDiff = MathFunctions.subtractPoses(startPose, oldStart);
// localizer.setPoseEstimate(new Pose2d(getPose().getX() + startDiff.getX(), getPose().getY() + startDiff.getY(), getPose().getHeading() + startDiff.getHeading()));
// }
//
// /**
// * This sets the current pose estimate. This has no effect on the start pose.
// *
// * @param setPose the new current pose estimate
// */
// @Override
// public void setPose(Pose setPose) {
// localizer.setPoseEstimate(new Pose2d(setPose.getX(), setPose.getY(), setPose.getHeading()));
// }
//
// /**
// * This updates the total heading and previous pose estimate. Everything else is handled by the
// * Road Runner localizer on its own, but updating this tells you how far the robot has really
// * turned.
// */
// @Override
// public void update() {
// totalHeading += MathFunctions.getTurnDirection(previousPose.getHeading(), getPose().getHeading()) * MathFunctions.getSmallestAngleDifference(previousPose.getHeading(), getPose().getHeading());
// previousPose = getPose();
// }
//
// /**
// * This returns how far the robot has actually turned.
// *
// * @return returns the total angle turned, in degrees.
// */
// @Override
// public double getTotalHeading() {
// return totalHeading;
// }
//
// /**
// * This returns the forward multiplier of the Road Runner localizer, which converts from ticks
// * to inches. You can actually use the tuners in Pedro Pathing to find the value that everything
// * multiplied together should be. If you do use that, then do be aware that the value returned is
// * the product of the Road Runner ticks to inches and the x multiplier.
// *
// * @return returns the forward multiplier
// */
// @Override
// public double getForwardMultiplier() {
// return RoadRunnerThreeWheelLocalizer.encoderTicksToInches(1) * RoadRunnerThreeWheelLocalizer.X_MULTIPLIER;
// }
//
// /**
// * This returns the lateral multiplier of the Road Runner localizer, which converts from ticks
// * to inches. You can actually use the tuners in Pedro Pathing to find the value that everything
// * multiplied together should be. If you do use that, then do be aware that the value returned is
// * the product of the Road Runner ticks to inches and the y multiplier.
// *
// * @return returns the lateral multiplier
// */
// @Override
// public double getLateralMultiplier() {
// return RoadRunnerThreeWheelLocalizer.encoderTicksToInches(1) * RoadRunnerThreeWheelLocalizer.Y_MULTIPLIER;
// }
//
// /**
// * This returns the turning multiplier of the Road Runner localizer, which doesn't actually exist.
// * There really isn't a point in tuning the turning for the Road Runner localizer. This will
// * actually just return the average of the two other multipliers.
// *
// * @return returns the turning multiplier
// */
// @Override
// public double getTurningMultiplier() {
// return (getForwardMultiplier() + getLateralMultiplier()) / 2;
// }
//}

View File

@ -1,132 +0,0 @@
//package org.firstinspires.ftc.teamcode.pedroPathing.localization;
//
//import com.acmerobotics.roadrunner.util.NanoClock;
//import com.qualcomm.robotcore.hardware.DcMotorEx;
//import com.qualcomm.robotcore.hardware.DcMotorSimple;
//
///**
// * This class is adapted from the Road Runner Encoder class. Later, this will be replaced with a
// * custom encoder class. According to Road Runner, this wraps a motor instance to provide corrected
// * velocity counts and allow reversing independently of the corresponding slot's motor direction.
// *
// * I'm fairly sure I didn't make any changes to this class, just copied it so I wouldn't have to have
// * import statements, so I'm not crediting myself as an author for this.
// *
// * @author Road Runner dev team
// * @version 1.0, 5/9/2024
// */
//public class RoadRunnerEncoder {
// private final static int CPS_STEP = 0x10000;
//
// private static double inverseOverflow(double input, double estimate) {
// // convert to uint16
// int real = (int) input & 0xffff;
// // initial, modulo-based correction: it can recover the remainder of 5 of the upper 16 bits
// // because the velocity is always a multiple of 20 cps due to Expansion Hub's 50ms measurement window
// real += ((real % 20) / 4) * CPS_STEP;
// // estimate-based correction: it finds the nearest multiple of 5 to correct the upper bits by
// real += Math.round((estimate - real) / (5 * CPS_STEP)) * 5 * CPS_STEP;
// return real;
// }
//
// public enum Direction {
// FORWARD(1),
// REVERSE(-1);
//
// private int multiplier;
//
// Direction(int multiplier) {
// this.multiplier = multiplier;
// }
//
// public int getMultiplier() {
// return multiplier;
// }
// }
//
// private DcMotorEx motor;
// private NanoClock clock;
//
// private Direction direction;
//
// private int lastPosition;
// private int velocityEstimateIdx;
// private double[] velocityEstimates;
// private double lastUpdateTime;
//
// public RoadRunnerEncoder(DcMotorEx motor, NanoClock clock) {
// this.motor = motor;
// this.clock = clock;
//
// this.direction = Direction.FORWARD;
//
// this.lastPosition = 0;
// this.velocityEstimates = new double[3];
// this.lastUpdateTime = clock.seconds();
// }
//
// public RoadRunnerEncoder(DcMotorEx motor) {
// this(motor, NanoClock.system());
// }
//
// public Direction getDirection() {
// return direction;
// }
//
// private int getMultiplier() {
// return getDirection().getMultiplier() * (motor.getDirection() == DcMotorSimple.Direction.FORWARD ? 1 : -1);
// }
//
// /**
// * Allows you to set the direction of the counts and velocity without modifying the motor's direction state
// * @param direction either reverse or forward depending on if encoder counts should be negated
// */
// public void setDirection(Direction direction) {
// this.direction = direction;
// }
//
// /**
// * Gets the position from the underlying motor and adjusts for the set direction.
// * Additionally, this method updates the velocity estimates used for compensated velocity
// *
// * @return encoder position
// */
// public int getCurrentPosition() {
// int multiplier = getMultiplier();
// int currentPosition = motor.getCurrentPosition() * multiplier;
// if (currentPosition != lastPosition) {
// double currentTime = clock.seconds();
// double dt = currentTime - lastUpdateTime;
// velocityEstimates[velocityEstimateIdx] = (currentPosition - lastPosition) / dt;
// velocityEstimateIdx = (velocityEstimateIdx + 1) % 3;
// lastPosition = currentPosition;
// lastUpdateTime = currentTime;
// }
// return currentPosition;
// }
//
// /**
// * Gets the velocity directly from the underlying motor and compensates for the direction
// * See {@link #getCorrectedVelocity} for high (>2^15) counts per second velocities (such as on REV Through Bore)
// *
// * @return raw velocity
// */
// public double getRawVelocity() {
// int multiplier = getMultiplier();
// return motor.getVelocity() * multiplier;
// }
//
// /**
// * Uses velocity estimates gathered in {@link #getCurrentPosition} to estimate the upper bits of velocity
// * that are lost in overflow due to velocity being transmitted as 16 bits.
// * CAVEAT: must regularly call {@link #getCurrentPosition} for the compensation to work correctly.
// *
// * @return corrected velocity
// */
// public double getCorrectedVelocity() {
// double median = velocityEstimates[0] > velocityEstimates[1]
// ? Math.max(velocityEstimates[1], Math.min(velocityEstimates[0], velocityEstimates[2]))
// : Math.max(velocityEstimates[0], Math.min(velocityEstimates[1], velocityEstimates[2]));
// return inverseOverflow(getRawVelocity(), median);
// }
//}
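For context on the overflow compensation above: the velocity is transmitted as a signed 16-bit value, so anything beyond 2^15 = 32768 counts per second wraps around. An 8192-tick-per-revolution encoder spinning at 5 rev/s really produces 40960 cps but comes back as 40960 - 65536 = -24576. inverseOverflow() reconstructs the lost upper bits from two facts: reported velocities are multiples of 20 cps (the 50 ms measurement window mentioned in the comment), and the position-based estimates gathered in getCurrentPosition() indicate roughly where the true value should land. getCorrectedVelocity() passes in the median of the last three estimates so one noisy sample cannot skew the correction.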

View File

@ -1,123 +0,0 @@
//package org.firstinspires.ftc.teamcode.pedroPathing.localization;
//
//import androidx.annotation.NonNull;
//
//import com.acmerobotics.dashboard.config.Config;
//import com.acmerobotics.roadrunner.geometry.Pose2d;
//import com.acmerobotics.roadrunner.localization.ThreeTrackingWheelLocalizer;
//import com.qualcomm.robotcore.hardware.DcMotorEx;
//import com.qualcomm.robotcore.hardware.HardwareMap;
//
//import java.util.Arrays;
//import java.util.List;
//
///*
// * Sample tracking wheel localizer implementation assuming the standard configuration:
// *
// * left on robot is y pos
// *
// * front of robot is x pos
// *
// * /--------------\
// * | ____ |
// * | ---- |
// * | || || |
// * | || || |
// * | |
// * | |
// * \--------------/
// *
// */
//
///**
// * This class is adapted from the Road Runner StandardTrackingWheelLocalizer class. Later, this will
// * be replaced with a custom localizer. I made some minor changes, so I'm crediting myself as an
// * 'author' of sorts, but really this is pretty much Road Runner's code, just moved to be local to
// * Pedro Pathing to avoid having imports.
// *
// * @author Road Runner dev team
// * @author Anyi Lin - 10158 Scott's Bots
// * @version 1.0, 5/9/2024
// */
//@Config
//public class RoadRunnerThreeWheelLocalizer extends ThreeTrackingWheelLocalizer {
// public static double TICKS_PER_REV = 8192;
// public static double WHEEL_RADIUS = 1.37795; // in
// public static double GEAR_RATIO = 1; // output (wheel) speed / input (encoder) speed
//
// public static double X_MULTIPLIER = 0.5008239963;
// public static double Y_MULTIPLIER = 0.5018874659;
//
// public static double leftX = -18.5/25.4 - 0.1, leftY = 164.4/25.4, rightX = -18.4/25.4 - 0.1, rightY = -159.6/25.4, strafeX = -107.9/25.4+0.25, strafeY = -1.1/25.4-0.23;
//
// private RoadRunnerEncoder leftEncoder, rightEncoder, strafeEncoder;
//
// private List<Integer> lastEncPositions, lastEncVels;
//
// public RoadRunnerThreeWheelLocalizer(HardwareMap hardwareMap, List<Integer> lastTrackingEncPositions, List<Integer> lastTrackingEncVels) {
// super(Arrays.asList(
// new Pose2d(leftX, leftY, 0), // left
// new Pose2d(rightX, rightY, 0), // right
// new Pose2d(strafeX, strafeY, Math.toRadians(90)) // strafe
// ));
//
// lastEncPositions = lastTrackingEncPositions;
// lastEncVels = lastTrackingEncVels;
//
// // TODO: redo the configs here
// leftEncoder = new RoadRunnerEncoder(hardwareMap.get(DcMotorEx.class, "leftRear"));
// rightEncoder = new RoadRunnerEncoder(hardwareMap.get(DcMotorEx.class, "rightFront"));
// strafeEncoder = new RoadRunnerEncoder(hardwareMap.get(DcMotorEx.class, "strafeEncoder"));
//
// // TODO: reverse any encoders using Encoder.setDirection(Encoder.Direction.REVERSE)
// leftEncoder.setDirection(RoadRunnerEncoder.Direction.REVERSE);
// rightEncoder.setDirection(RoadRunnerEncoder.Direction.REVERSE);
// strafeEncoder.setDirection(RoadRunnerEncoder.Direction.FORWARD);
// }
//
// public void resetHeading(double heading) {
// setPoseEstimate(new Pose2d(getPoseEstimate().getX(), getPoseEstimate().getY(), heading));
// }
//
// public static double encoderTicksToInches(double ticks) {
// return WHEEL_RADIUS * 2 * Math.PI * GEAR_RATIO * ticks / TICKS_PER_REV;
// }
//
// @NonNull
// @Override
// public List<Double> getWheelPositions() {
// int leftPos = leftEncoder.getCurrentPosition();
// int rightPos = rightEncoder.getCurrentPosition();
// int frontPos = strafeEncoder.getCurrentPosition();
//
// lastEncPositions.clear();
// lastEncPositions.add(leftPos);
// lastEncPositions.add(rightPos);
// lastEncPositions.add(frontPos);
//
// return Arrays.asList(
// encoderTicksToInches(leftPos) * X_MULTIPLIER,
// encoderTicksToInches(rightPos) * X_MULTIPLIER,
// encoderTicksToInches(frontPos) * Y_MULTIPLIER
// );
// }
//
// @NonNull
// @Override
// public List<Double> getWheelVelocities() {
// int leftVel = (int) leftEncoder.getCorrectedVelocity();
// int rightVel = (int) rightEncoder.getCorrectedVelocity();
// int frontVel = (int) strafeEncoder.getCorrectedVelocity();
//
// lastEncVels.clear();
// lastEncVels.add(leftVel);
// lastEncVels.add(rightVel);
// lastEncVels.add(frontVel);
//
// return Arrays.asList(
// encoderTicksToInches(leftVel) * X_MULTIPLIER,
// encoderTicksToInches(rightVel) * X_MULTIPLIER,
// encoderTicksToInches(frontVel) * Y_MULTIPLIER
// );
// }
//}
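A quick sanity check on the conversion above: encoderTicksToInches(1) = 1.37795 * 2 * pi * 1 / 8192 ≈ 0.00106 inches per tick, and multiplying by X_MULTIPLIER ≈ 0.5008 gives roughly 0.00053 inches per tick, which is exactly the product returned by the commented-out getForwardMultiplier() override earlier in this change.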

View File

@ -1,317 +0,0 @@
package org.firstinspires.ftc.teamcode.pedroPathing.localization.localizers;
import static org.firstinspires.ftc.teamcode.PedroConstants.*;
import com.acmerobotics.dashboard.config.Config;
import com.qualcomm.hardware.rev.RevHubOrientationOnRobot;
import com.qualcomm.robotcore.hardware.DcMotorEx;
import com.qualcomm.robotcore.hardware.HardwareMap;
import com.qualcomm.robotcore.hardware.IMU;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Encoder;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Localizer;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Matrix;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.MathFunctions;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector;
import org.firstinspires.ftc.teamcode.pedroPathing.util.NanoTimer;
/**
* This is the ThreeWheelIMULocalizer class. This class extends the Localizer superclass and is a
* localizer that uses the three wheel odometry set up with the IMU to have more accurate heading
* readings. The diagram below, which is modified from Road Runner, shows a typical set up.
*
* The view is from the top of the robot looking downwards.
*
* left on robot is the y positive direction
*
* forward on robot is the x positive direction
*
* /--------------\
* | ____ |
* | ---- |
* | || || |
* | || || | ----> left (y positive)
* | |
* | |
* \--------------/
* |
* |
* V
* forward (x positive)
*
* @author Logan Nash
* @author Anyi Lin - 10158 Scott's Bots
* @version 1.0, 7/9/2024
*/
@Config
public class ThreeWheelIMULocalizer extends Localizer {
private HardwareMap hardwareMap;
private Pose startPose;
private Pose displacementPose;
private Pose currentVelocity;
private Matrix prevRotationMatrix;
private NanoTimer timer;
private long deltaTimeNano;
private Encoder leftEncoder;
private Encoder rightEncoder;
private Encoder strafeEncoder;
private Pose leftEncoderPose;
private Pose rightEncoderPose;
private Pose strafeEncoderPose;
public final IMU imu;
private double previousIMUOrientation;
private double deltaRadians;
private double totalHeading;
public static double FORWARD_TICKS_TO_INCHES = 0.0029;//8192 * 1.37795 * 2 * Math.PI * 0.5008239963;
public static double STRAFE_TICKS_TO_INCHES = 0.0029;//8192 * 1.37795 * 2 * Math.PI * 0.5018874659;
public static double TURN_TICKS_TO_RADIANS = 0.0022;//8192 * 1.37795 * 2 * Math.PI * 0.5;
public static boolean useIMU = true;
/**
* This creates a new ThreeWheelIMULocalizer from a HardwareMap, with a starting Pose at (0,0)
* facing 0 heading.
*
* @param map the HardwareMap
*/
public ThreeWheelIMULocalizer(HardwareMap map) {
this(map, new Pose());
}
/**
* This creates a new ThreeWheelIMULocalizer from a HardwareMap and a Pose, with the Pose
* specifying the starting pose of the localizer.
*
* @param map the HardwareMap
* @param setStartPose the Pose to start from
*/
public ThreeWheelIMULocalizer(HardwareMap map, Pose setStartPose) {
hardwareMap = map;
imu = hardwareMap.get(IMU.class, IMU);
// TODO: replace this with your IMU's orientation
imu.initialize(new IMU.Parameters(new RevHubOrientationOnRobot(IMU_LOGO_FACING_DIRECTION, IMU_USB_FACING_DIRECTION)));
// TODO: replace these with your encoder positions
leftEncoderPose = new Pose(0, 6.19375, 0);
rightEncoderPose = new Pose(0, -6.19375, 0);
strafeEncoderPose = new Pose(-7, 0, Math.toRadians(90));
// TODO: replace these with your encoder ports
leftEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, LEFT_ENCODER));
rightEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, RIGHT_ENCODER));
strafeEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, BACK_ENCODER));
// TODO: reverse any encoders necessary
leftEncoder.setDirection(LEFT_ENCODER_DIRECTION);
rightEncoder.setDirection(RIGHT_ENCODER_DIRECTION);
strafeEncoder.setDirection(BACK_ENCODER_DIRECTION);
setStartPose(setStartPose);
timer = new NanoTimer();
deltaTimeNano = 1;
displacementPose = new Pose();
currentVelocity = new Pose();
totalHeading = 0;
resetEncoders();
}
/**
* This returns the current pose estimate.
*
* @return returns the current pose estimate as a Pose
*/
@Override
public Pose getPose() {
return MathFunctions.addPoses(startPose, displacementPose);
}
/**
* This returns the current velocity estimate.
*
* @return returns the current velocity estimate as a Pose
*/
@Override
public Pose getVelocity() {
return currentVelocity.copy();
}
/**
* This returns the current velocity estimate.
*
* @return returns the current velocity estimate as a Vector
*/
@Override
public Vector getVelocityVector() {
return currentVelocity.getVector();
}
/**
* This sets the start pose. Changing the start pose should move the robot as if all its
* previous movements were displacing it from its new start pose.
*
* @param setStart the new start pose
*/
@Override
public void setStartPose(Pose setStart) {
startPose = setStart;
}
/**
* This sets the Matrix that contains the previous pose's heading rotation.
*
* @param heading the rotation of the Matrix
*/
public void setPrevRotationMatrix(double heading) {
prevRotationMatrix = new Matrix(3,3);
prevRotationMatrix.set(0, 0, Math.cos(heading));
prevRotationMatrix.set(0, 1, -Math.sin(heading));
prevRotationMatrix.set(1, 0, Math.sin(heading));
prevRotationMatrix.set(1, 1, Math.cos(heading));
prevRotationMatrix.set(2, 2, 1.0);
}
/**
* This sets the current pose estimate. Changing this should just change the robot's current
* pose estimate, not anything to do with the start pose.
*
* @param setPose the new current pose estimate
*/
@Override
public void setPose(Pose setPose) {
displacementPose = MathFunctions.subtractPoses(setPose, startPose);
resetEncoders();
}
/**
* This updates the elapsed time timer that keeps track of time between updates, as well as the
* change position of the Encoders. Then, the robot's global change in position is calculated
* using the pose exponential method.
*/
@Override
public void update() {
deltaTimeNano = timer.getElapsedTime();
timer.resetTimer();
updateEncoders();
Matrix robotDeltas = getRobotDeltas();
Matrix globalDeltas;
setPrevRotationMatrix(getPose().getHeading());
Matrix transformation = new Matrix(3,3);
if (Math.abs(robotDeltas.get(2, 0)) < 0.001) {
transformation.set(0, 0, 1.0 - (Math.pow(robotDeltas.get(2, 0), 2) / 6.0));
transformation.set(0, 1, -robotDeltas.get(2, 0) / 2.0);
transformation.set(1, 0, robotDeltas.get(2, 0) / 2.0);
transformation.set(1, 1, 1.0 - (Math.pow(robotDeltas.get(2, 0), 2) / 6.0));
transformation.set(2, 2, 1.0);
} else {
transformation.set(0, 0, Math.sin(robotDeltas.get(2, 0)) / robotDeltas.get(2, 0));
transformation.set(0, 1, (Math.cos(robotDeltas.get(2, 0)) - 1.0) / robotDeltas.get(2, 0));
transformation.set(1, 0, (1.0 - Math.cos(robotDeltas.get(2, 0))) / robotDeltas.get(2, 0));
transformation.set(1, 1, Math.sin(robotDeltas.get(2, 0)) / robotDeltas.get(2, 0));
transformation.set(2, 2, 1.0);
}
globalDeltas = Matrix.multiply(Matrix.multiply(prevRotationMatrix, transformation), robotDeltas);
displacementPose.add(new Pose(globalDeltas.get(0, 0), globalDeltas.get(1, 0), globalDeltas.get(2, 0)));
currentVelocity = new Pose(globalDeltas.get(0, 0) / (deltaTimeNano * Math.pow(10.0, 9)), globalDeltas.get(1, 0) / (deltaTimeNano * Math.pow(10.0, 9)), globalDeltas.get(2, 0) / (deltaTimeNano * Math.pow(10.0, 9)));
totalHeading += globalDeltas.get(2, 0);
}
/**
* This updates the Encoders.
*/
public void updateEncoders() {
leftEncoder.update();
rightEncoder.update();
strafeEncoder.update();
double currentIMUOrientation = MathFunctions.normalizeAngle(imu.getRobotYawPitchRollAngles().getYaw(AngleUnit.RADIANS));
deltaRadians = MathFunctions.getTurnDirection(previousIMUOrientation, currentIMUOrientation) * MathFunctions.getSmallestAngleDifference(currentIMUOrientation, previousIMUOrientation);
previousIMUOrientation = currentIMUOrientation;
}
/**
* This resets the Encoders.
*/
public void resetEncoders() {
leftEncoder.reset();
rightEncoder.reset();
strafeEncoder.reset();
}
/**
* This calculates the change in position from the perspective of the robot using information
* from the Encoders.
*
* @return returns a Matrix containing the robot relative movement.
*/
public Matrix getRobotDeltas() {
Matrix returnMatrix = new Matrix(3,1);
// x/forward movement
returnMatrix.set(0,0, FORWARD_TICKS_TO_INCHES * ((rightEncoder.getDeltaPosition() * leftEncoderPose.getY() - leftEncoder.getDeltaPosition() * rightEncoderPose.getY()) / (leftEncoderPose.getY() - rightEncoderPose.getY())));
//y/strafe movement
returnMatrix.set(1,0, STRAFE_TICKS_TO_INCHES * (strafeEncoder.getDeltaPosition() - strafeEncoderPose.getX() * ((rightEncoder.getDeltaPosition() - leftEncoder.getDeltaPosition()) / (leftEncoderPose.getY() - rightEncoderPose.getY()))));
// theta/turning
if (MathFunctions.getSmallestAngleDifference(0, deltaRadians) > 0.00005 && useIMU) {
returnMatrix.set(2, 0, deltaRadians);
} else {
returnMatrix.set(2,0, TURN_TICKS_TO_RADIANS * ((rightEncoder.getDeltaPosition() - leftEncoder.getDeltaPosition()) / (leftEncoderPose.getY() - rightEncoderPose.getY())));
}
return returnMatrix;
}
/**
* This returns how far the robot has turned in radians, in a number not clamped between 0 and
* 2 * pi radians. This is used by some tuning OpModes and not by anything within the follower itself.
*
* @return returns how far the robot has turned in total, in radians.
*/
public double getTotalHeading() {
return totalHeading;
}
/**
* This returns the multiplier applied to forward movement measurement to convert from encoder
* ticks to inches. This is found empirically through a tuner.
*
* @return returns the forward ticks to inches multiplier
*/
public double getForwardMultiplier() {
return FORWARD_TICKS_TO_INCHES;
}
/**
* This returns the multiplier applied to lateral/strafe movement measurement to convert from
* encoder ticks to inches. This is found empirically through a tuner.
*
* @return returns the lateral/strafe ticks to inches multiplier
*/
public double getLateralMultiplier() {
return STRAFE_TICKS_TO_INCHES;
}
/**
* This returns the multiplier applied to turning movement measurement to convert from encoder
* ticks to radians. This is found empirically through a tuner.
*
* @return returns the turning ticks to radians multiplier
*/
public double getTurningMultiplier() {
return TURN_TICKS_TO_RADIANS;
}
/**
* This resets the IMU.
*/
public void resetIMU() {
imu.resetYaw();
}
}
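Restating the getRobotDeltas() math above in plainer terms, using the encoder poses configured in this file (deltaLeft, deltaRight, and deltaStrafe are the raw tick deltas from each pod, yL = 6.19375, yR = -6.19375, xStrafe = -7):

    forward = FORWARD_TICKS_TO_INCHES * (deltaRight * yL - deltaLeft * yR) / (yL - yR)
    strafe  = STRAFE_TICKS_TO_INCHES * (deltaStrafe - xStrafe * (deltaRight - deltaLeft) / (yL - yR))
    turn    = the IMU's deltaRadians when useIMU is true and its magnitude exceeds 0.00005 rad,
              otherwise TURN_TICKS_TO_RADIANS * (deltaRight - deltaLeft) / (yL - yR)

The strafe correction exists because rotating in place also rolls the strafe wheel by an amount proportional to its x offset; subtracting that term leaves only genuine sideways motion. Preferring the IMU delta for the turn component whenever it reports meaningful rotation is what distinguishes this localizer from the plain three-wheel version.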

View File

@ -57,9 +57,9 @@ public class ThreeWheelLocalizer extends Localizer {
     private Pose rightEncoderPose;
     private Pose strafeEncoderPose;
     private double totalHeading;
-    public static double FORWARD_TICKS_TO_INCHES = 0.0029;//8192 * 1.37795 * 2 * Math.PI * 0.5008239963;
-    public static double STRAFE_TICKS_TO_INCHES = 0.0029;//8192 * 1.37795 * 2 * Math.PI * 0.5018874659;
-    public static double TURN_TICKS_TO_RADIANS = 0.003;//8192 * 1.37795 * 2 * Math.PI * 0.5;
+    public static double FORWARD_TICKS_TO_INCHES = 0.003;//8192 * 1.37795 * 2 * Math.PI * 0.5008239963;
+    public static double STRAFE_TICKS_TO_INCHES = -0.003;//8192 * 1.37795 * 2 * Math.PI * 0.5018874659;
+    public static double TURN_TICKS_TO_RADIANS = 0.0029;//8192 * 1.37795 * 2 * Math.PI * 0.5;
     /**
      * This creates a new ThreeWheelLocalizer from a HardwareMap, with a starting Pose at (0,0)
@ -80,9 +80,9 @@ public class ThreeWheelLocalizer extends Localizer {
      */
     public ThreeWheelLocalizer(HardwareMap map, Pose setStartPose) {
         // TODO: replace these with your encoder positions
-        leftEncoderPose = new Pose(0, 6.19375, 0);
-        rightEncoderPose = new Pose(0, -6.19375, 0);
-        strafeEncoderPose = new Pose(-7, 0, Math.toRadians(90));
+        leftEncoderPose = new Pose(0.25, 6.25, 0);
+        rightEncoderPose = new Pose(0.25, -6.25, 0);
+        strafeEncoderPose = new Pose(-7, 0.25, Math.toRadians(90));
         hardwareMap = map;
@ -92,9 +92,9 @@ public class ThreeWheelLocalizer extends Localizer {
         strafeEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, BACK_ENCODER));
         // TODO: reverse any encoders necessary
-        //leftEncoder.setDirection(Encoder.REVERSE);
-        // rightEncoder.setDirection(Encoder.REVERSE);
-        //strafeEncoder.setDirection(Encoder.FORWARD);
+        leftEncoder.setDirection(LEFT_ENCODER_DIRECTION);
+        rightEncoder.setDirection(RIGHT_ENCODER_DIRECTION);
+        strafeEncoder.setDirection(BACK_ENCODER_DIRECTION);
         setStartPose(setStartPose);
         timer = new NanoTimer();

View File

@ -1,302 +0,0 @@
package org.firstinspires.ftc.teamcode.pedroPathing.localization.localizers;
import static org.firstinspires.ftc.teamcode.PedroConstants.BACK_ENCODER;
import static org.firstinspires.ftc.teamcode.PedroConstants.LEFT_ENCODER;
import com.acmerobotics.dashboard.config.Config;
import com.qualcomm.hardware.rev.RevHubOrientationOnRobot;
import com.qualcomm.robotcore.hardware.DcMotorEx;
import com.qualcomm.robotcore.hardware.HardwareMap;
import com.qualcomm.robotcore.hardware.IMU;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Encoder;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Localizer;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Matrix;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.MathFunctions;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector;
import org.firstinspires.ftc.teamcode.pedroPathing.util.NanoTimer;
/**
* This is the TwoWheelLocalizer class. This class extends the Localizer superclass and is a
* localizer that uses the two wheel odometry with IMU set up. The diagram below, which is modified from
* Road Runner, shows a typical set up.
*
* The view is from the top of the robot looking downwards.
*
* left on robot is the y positive direction
*
* forward on robot is the x positive direction
*
* /--------------\
* | ____ |
* | ---- |
* | || || |
* | || || | ----> left (y positive)
* | |
* | |
* \--------------/
* |
* |
* V
* forward (x positive)
*
* @author Anyi Lin - 10158 Scott's Bots
* @version 1.0, 4/2/2024
*/
@Config
public class TwoWheelLocalizer extends Localizer { // todo: make two wheel odo work
private HardwareMap hardwareMap;
private IMU imu;
private Pose startPose;
private Pose displacementPose;
private Pose currentVelocity;
private Matrix prevRotationMatrix;
private NanoTimer timer;
private long deltaTimeNano;
private Encoder forwardEncoder;
private Encoder strafeEncoder;
private Pose forwardEncoderPose;
private Pose strafeEncoderPose;
private double previousIMUOrientation;
private double deltaRadians;
private double totalHeading;
public static double FORWARD_TICKS_TO_INCHES = 8192 * 1.37795 * 2 * Math.PI * 0.5008239963;
public static double STRAFE_TICKS_TO_INCHES = 8192 * 1.37795 * 2 * Math.PI * 0.5018874659;
/**
* This creates a new TwoWheelLocalizer from a HardwareMap, with a starting Pose at (0,0)
* facing 0 heading.
*
* @param map the HardwareMap
*/
public TwoWheelLocalizer(HardwareMap map) {
this(map, new Pose());
}
/**
* This creates a new TwoWheelLocalizer from a HardwareMap and a Pose, with the Pose
* specifying the starting pose of the localizer.
*
* @param map the HardwareMap
* @param setStartPose the Pose to start from
*/
public TwoWheelLocalizer(HardwareMap map, Pose setStartPose) {
// TODO: replace these with your encoder positions
forwardEncoderPose = new Pose(-18.5/25.4 - 0.1, 164.4/25.4, 0);
strafeEncoderPose = new Pose(-107.9/25.4+0.25, -1.1/25.4-0.23, Math.toRadians(90));
hardwareMap = map;
imu = hardwareMap.get(IMU.class, "imu");
// TODO: replace this with your IMU's orientation
imu.initialize(new IMU.Parameters(new RevHubOrientationOnRobot(RevHubOrientationOnRobot.LogoFacingDirection.UP, RevHubOrientationOnRobot.UsbFacingDirection.LEFT)));
// TODO: replace these with your encoder ports
forwardEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, LEFT_ENCODER));
strafeEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, BACK_ENCODER));
// TODO: reverse any encoders necessary
forwardEncoder.setDirection(Encoder.REVERSE);
strafeEncoder.setDirection(Encoder.FORWARD);
setStartPose(setStartPose);
timer = new NanoTimer();
deltaTimeNano = 1;
displacementPose = new Pose();
currentVelocity = new Pose();
previousIMUOrientation = MathFunctions.normalizeAngle(imu.getRobotYawPitchRollAngles().getYaw(AngleUnit.RADIANS));
deltaRadians = 0;
}
/**
* This returns the current pose estimate.
*
* @return returns the current pose estimate as a Pose
*/
@Override
public Pose getPose() {
return MathFunctions.addPoses(startPose, displacementPose);
}
/**
* This returns the current velocity estimate.
*
* @return returns the current velocity estimate as a Pose
*/
@Override
public Pose getVelocity() {
return currentVelocity.copy();
}
/**
* This returns the current velocity estimate.
*
* @return returns the current velocity estimate as a Vector
*/
@Override
public Vector getVelocityVector() {
return currentVelocity.getVector();
}
/**
* This sets the start pose. Changing the start pose should move the robot as if all its
* previous movements were displacing it from its new start pose.
*
* @param setStart the new start pose
*/
@Override
public void setStartPose(Pose setStart) {
startPose = setStart;
}
/**
* This sets the Matrix that contains the previous pose's heading rotation.
*
* @param heading the rotation of the Matrix
*/
public void setPrevRotationMatrix(double heading) {
prevRotationMatrix = new Matrix(3,3);
prevRotationMatrix.set(0, 0, Math.cos(heading));
prevRotationMatrix.set(0, 1, -Math.sin(heading));
prevRotationMatrix.set(1, 0, Math.sin(heading));
prevRotationMatrix.set(1, 1, Math.cos(heading));
prevRotationMatrix.set(2, 2, 1.0);
}
/**
* This sets the current pose estimate. Changing this should just change the robot's current
* pose estimate, not anything to do with the start pose.
*
* @param setPose the new current pose estimate
*/
@Override
public void setPose(Pose setPose) {
displacementPose = MathFunctions.subtractPoses(setPose, startPose);
resetEncoders();
}
/**
* This updates the elapsed time timer that keeps track of time between updates, as well as the
* change position of the Encoders and the IMU readings. Then, the robot's global change in
* position is calculated using the pose exponential method.
*/
@Override
public void update() {
deltaTimeNano = timer.getElapsedTime();
timer.resetTimer();
updateEncoders();
Matrix robotDeltas = getRobotDeltas();
Matrix globalDeltas;
setPrevRotationMatrix(getPose().getHeading());
Matrix transformation = new Matrix(3,3);
if (Math.abs(robotDeltas.get(2, 0)) < 0.001) {
transformation.set(0, 0, 1.0 - (Math.pow(robotDeltas.get(2, 0), 2) / 6.0));
transformation.set(0, 1, -robotDeltas.get(2, 0) / 2.0);
transformation.set(1, 0, robotDeltas.get(2, 0) / 2.0);
transformation.set(1, 1, 1.0 - (Math.pow(robotDeltas.get(2, 0), 2) / 6.0));
transformation.set(2, 2, 1.0);
} else {
transformation.set(0, 0, Math.sin(robotDeltas.get(2, 0)) / robotDeltas.get(2, 0));
transformation.set(0, 1, (Math.cos(robotDeltas.get(2, 0)) - 1.0) / robotDeltas.get(2, 0));
transformation.set(1, 0, (1.0 - Math.cos(robotDeltas.get(2, 0))) / robotDeltas.get(2, 0));
transformation.set(1, 1, Math.sin(robotDeltas.get(2, 0)) / robotDeltas.get(2, 0));
transformation.set(2, 2, 1.0);
}
globalDeltas = Matrix.multiply(Matrix.multiply(prevRotationMatrix, transformation), robotDeltas);
displacementPose.add(new Pose(globalDeltas.get(0, 0), globalDeltas.get(1, 0), globalDeltas.get(2, 0)));
currentVelocity = new Pose(globalDeltas.get(0, 0) / (deltaTimeNano * Math.pow(10.0, 9)), globalDeltas.get(1, 0) / (deltaTimeNano * Math.pow(10.0, 9)), globalDeltas.get(2, 0) / (deltaTimeNano * Math.pow(10.0, 9)));
totalHeading += globalDeltas.get(2, 0);
}
/**
* This updates the Encoders as well as the IMU.
*/
public void updateEncoders() {
forwardEncoder.update();
strafeEncoder.update();
double currentIMUOrientation = MathFunctions.normalizeAngle(imu.getRobotYawPitchRollAngles().getYaw(AngleUnit.RADIANS));
deltaRadians = MathFunctions.getTurnDirection(previousIMUOrientation, currentIMUOrientation) * MathFunctions.getSmallestAngleDifference(currentIMUOrientation, previousIMUOrientation);
previousIMUOrientation = currentIMUOrientation;
}
/**
* This resets the Encoders.
*/
public void resetEncoders() {
forwardEncoder.reset();
strafeEncoder.reset();
}
/**
* This calculates the change in position from the perspective of the robot using information
* from the Encoders and IMU.
*
* @return returns a Matrix containing the robot relative movement.
*/
public Matrix getRobotDeltas() {
Matrix returnMatrix = new Matrix(3,1);
// x/forward movement
returnMatrix.set(0,0, FORWARD_TICKS_TO_INCHES * (forwardEncoder.getDeltaPosition() - forwardEncoderPose.getY() * deltaRadians));
//y/strafe movement
returnMatrix.set(1,0, STRAFE_TICKS_TO_INCHES * (strafeEncoder.getDeltaPosition() - strafeEncoderPose.getX() * deltaRadians));
// theta/turning
returnMatrix.set(2,0, deltaRadians);
return returnMatrix;
}
/**
* This returns how far the robot has turned in radians, in a number not clamped between 0 and
* 2 * pi radians. This is used by some tuning OpModes and not by anything within the follower itself.
*
* @return returns how far the robot has turned in total, in radians.
*/
public double getTotalHeading() {
return totalHeading;
}
/**
* This returns the multiplier applied to forward movement measurement to convert from encoder
* ticks to inches. This is found empirically through a tuner.
*
* @return returns the forward ticks to inches multiplier
*/
public double getForwardMultiplier() {
return FORWARD_TICKS_TO_INCHES;
}
/**
* This returns the multiplier applied to lateral/strafe movement measurement to convert from
* encoder ticks to inches. This is found empirically through a tuner.
*
* @return returns the lateral/strafe ticks to inches multiplier
*/
public double getLateralMultiplier() {
return STRAFE_TICKS_TO_INCHES;
}
/**
* This returns the multiplier applied to turning movement measurement to convert from encoder
* ticks to radians. This is found empirically through a tuner.
*
* @return returns the turning ticks to radians multiplier
*/
public double getTurningMultiplier() {
return 1;
}
/**
* This resets the IMU.
*/
public void resetIMU() {
imu.resetYaw();
}
}
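As in the three-wheel localizers earlier in this change, the update() method above applies the pose exponential: robot-relative deltas are mapped into field coordinates as

    globalDeltas = R(currentHeading) * T(deltaTheta) * robotDeltas

where R is a 2D rotation by the heading recorded at the start of the update and T carries sin(deltaTheta)/deltaTheta on its diagonal and +/-(1 - cos(deltaTheta))/deltaTheta off the diagonal. When |deltaTheta| < 0.001 rad the code switches to the Taylor approximations sin(x)/x ≈ 1 - x^2/6 and (1 - cos(x))/x ≈ x/2, which avoids dividing by a near-zero angle and is numerically indistinguishable from the exact form at that threshold.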

View File

@ -1,5 +1,9 @@
 package org.firstinspires.ftc.teamcode.pedroPathing.localization.tuning;
+import static org.firstinspires.ftc.teamcode.PedroConstants.BACK_LEFT_MOTOR_DIRECTION;
+import static org.firstinspires.ftc.teamcode.PedroConstants.BACK_RIGHT_MOTOR_DIRECTION;
+import static org.firstinspires.ftc.teamcode.PedroConstants.FRONT_LEFT_MOTOR_DIRECTION;
+import static org.firstinspires.ftc.teamcode.PedroConstants.FRONT_RIGHT_MOTOR_DIRECTION;
 import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftFrontMotorName;
 import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftRearMotorName;
 import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightFrontMotorName;
@ -58,8 +62,10 @@ public class LocalizationTest extends OpMode {
         rightRear = hardwareMap.get(DcMotorEx.class, rightRearMotorName);
         rightFront = hardwareMap.get(DcMotorEx.class, rightFrontMotorName);
-        leftFront.setDirection(DcMotorSimple.Direction.REVERSE);
-        leftRear.setDirection(DcMotorSimple.Direction.REVERSE);
+        leftFront.setDirection(FRONT_LEFT_MOTOR_DIRECTION);
+        leftRear.setDirection(BACK_LEFT_MOTOR_DIRECTION);
+        rightFront.setDirection(FRONT_RIGHT_MOTOR_DIRECTION);
+        rightRear.setDirection(BACK_RIGHT_MOTOR_DIRECTION);
         motors = Arrays.asList(leftFront, leftRear, rightFront, rightRear);

View File

@ -40,9 +40,9 @@ public class FollowerConstants {
    // Translational PIDF coefficients (don't use integral)
    public static CustomPIDFCoefficients translationalPIDFCoefficients = new CustomPIDFCoefficients(
-            0.1,
+            .25,
            0,
-            0.01,
+            0.0375,
            0);
    // Translational Integral
@ -60,7 +60,7 @@ public class FollowerConstants {
    public static CustomPIDFCoefficients headingPIDFCoefficients = new CustomPIDFCoefficients(
            2,
            0,
-            0.025,
+            0.0375,
            0);
    // Feed forward constant added on to the heading PIDF
@ -69,9 +69,9 @@ public class FollowerConstants {
    // Drive PIDF coefficients
    public static CustomFilteredPIDFCoefficients drivePIDFCoefficients = new CustomFilteredPIDFCoefficients(
-            0.006,
+            0.00375,
            0,
-            0.00001,
+            0.00003,
            0.8,
            0);

View File

@ -1,44 +0,0 @@
package org.firstinspires.ftc.teamcode.subsystem;
import androidx.annotation.NonNull;
import com.acmerobotics.dashboard.telemetry.TelemetryPacket;
import com.acmerobotics.roadrunner.Action;
import org.firstinspires.ftc.teamcode.pedroPathing.follower.Follower;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierCurve;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierLine;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathBuilder;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathChain;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Point;
/*
AutoLine1 - Builds a single BezierLine path from (8, 65) to (30, 72) with a constant 0-degree heading, sets the follower's starting pose, and starts following it.
*/
public class AutoLine1 {
private PathChain pathChain;
private Pose autoLin1StartPose = new Pose(8,65);
public void moveToAutoLine1(Follower robot) {
PathBuilder builder = new PathBuilder();
builder
.addPath(
// Line 1
new BezierLine(
new Point(8.000, 65.000, Point.CARTESIAN),
new Point(30.000, 72.000, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(0));
pathChain = builder.build();
robot.setStartingPose(autoLin1StartPose);
robot.followPath(pathChain);
}
}

View File

@ -1,40 +0,0 @@
package org.firstinspires.ftc.teamcode.subsystem;
import androidx.annotation.NonNull;
import com.acmerobotics.dashboard.telemetry.TelemetryPacket;
import com.acmerobotics.roadrunner.Action;
import org.firstinspires.ftc.teamcode.pedroPathing.follower.Follower;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierCurve;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierLine;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathBuilder;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathChain;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Point;
/*
AutoLine2 - Builds a BezierCurve path from (36, 72) through two control points to (56, 24), interpolating the heading linearly from 0 to 180 degrees, and tells the follower to follow it.
*/
public class AutoLine2 {
private PathChain pathChain;
public void moveToAutoLine2(Follower robot) {
PathBuilder builder = new PathBuilder();
builder
.addPath(
// Line 2
new BezierCurve(
new Point(36.000, 72.000, Point.CARTESIAN),
new Point(24.000, 24.000, Point.CARTESIAN),
new Point(72.000, 36.000, Point.CARTESIAN),
new Point(56.000, 24.000, Point.CARTESIAN)
)
)
.setLinearHeadingInterpolation(Math.toRadians(0), Math.toRadians(180));
pathChain = builder.build();
robot.followPath(pathChain);
}
}

View File

@ -1,73 +0,0 @@
package org.firstinspires.ftc.teamcode.subsystem;
import androidx.annotation.NonNull;
import com.acmerobotics.dashboard.telemetry.TelemetryPacket;
import com.acmerobotics.roadrunner.Action;
import org.firstinspires.ftc.teamcode.pedroPathing.follower.Follower;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierCurve;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierLine;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathBuilder;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathChain;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Point;
/*
AutoLine3 - A Road Runner Action that builds a BezierLine path from (56, 24) to (18, 24) with tangent heading interpolation, starts following it in the constructor, and reports in run() whether the follower is still busy.
*/
public class AutoLine3 implements Action {
private Follower actionRobot;
private PathChain pathChain;
private Pose startPose = new Pose(56,24);
public AutoLine3(Follower robot) {
this.actionRobot = robot;
this.actionRobot.setStartingPose(startPose);
PathBuilder builder = new PathBuilder();
builder
/* .addPath(
// Line 1
new BezierLine(
new Point(8.000, 65.000, Point.CARTESIAN),
new Point(30.000, 72.000, Point.CARTESIAN)
// This was originally new Point(36.000, 72.000, Point.CARTESIAN)
)
)
.setConstantHeadingInterpolation(Math.toRadians(0))
.addPath(
// Line 2
new BezierCurve(
new Point(36.000, 72.000, Point.CARTESIAN),
new Point(24.000, 24.000, Point.CARTESIAN),
new Point(72.000, 36.000, Point.CARTESIAN),
new Point(56.000, 24.000, Point.CARTESIAN)
)
)
.setLinearHeadingInterpolation(Math.toRadians(0), Math.toRadians(180)) */
.addPath(
// Line 3
new BezierLine(
new Point(56.000, 24.000, Point.CARTESIAN),
new Point(18.000, 24.000, Point.CARTESIAN)
)
)
.setTangentHeadingInterpolation();
pathChain = builder.build();
this.actionRobot.followPath(this.pathChain);
}
@Override
public boolean run(@NonNull TelemetryPacket telemetryPacket) {
this.actionRobot.update();
return this.actionRobot.isBusy();
}
}

View File

@ -0,0 +1,131 @@
package org.firstinspires.ftc.teamcode.subsystem;
import static org.firstinspires.ftc.teamcode.PedroConstants.LIFT_SLIDE_LEFT_MOTOR;
import static org.firstinspires.ftc.teamcode.PedroConstants.LIFT_SLIDE_RIGHT_MOTOR;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.hardware.DcMotorEx;
import com.qualcomm.robotcore.hardware.HardwareMap;
import com.qualcomm.robotcore.util.ElapsedTime;
public class DualMotorSliderSubsystem {
/*
liftSlideLeft - Left motor for the dual linear slide, as a DcMotorEx object.
It starts out null; the constructor below assigns it the actual DcMotorEx pulled from the hardware map.
*/
private DcMotorEx liftSlideLeft;
/*
liftSlideRight - Right motor for the dual linear slide, as a DcMotorEx object.
It starts out null; the constructor below assigns it the actual DcMotorEx pulled from the hardware map.
*/
private DcMotorEx liftSlideRight;
/*
targetPosition - Holds the target position of the slides, in encoder ticks.
*/
private int targetPosition = 0;
/*
getTargetPosition/setTargetPosition - targetPosition is kept private; callers change it through the
                                      public setter, and the class reads it back through its getter,
                                      rather than touching the field directly.
*/
public void setTargetPosition(int value) {
targetPosition = value;
}
private int getTargetPosition() { return targetPosition; }
/*
PID - Proportional/Integral/Derivative gains.
For this dual-motor linear slide we only tune kp, the proportional gain, which sets how hard the
motors drive toward the target position.
ki and kd stay at zero: the integral term corrects steady-state error and the derivative term damps
oscillation, and proportional control alone has held the slides well enough so far.
*/
public final static double kp = 0.0015, ki = 0, kd = 0;
/*
lastError/integralSum/timer - State carried between updates for the PID math: the previous error
                              (derivative term), the accumulated error (integral term), and a timer
                              measuring the time between updates.
*/
private double lastError = 0;
private double integralSum = 0;
private ElapsedTime timer = new ElapsedTime();
public DualMotorSliderSubsystem(HardwareMap hardwareMap) {
/*
liftSlideLeft/liftSlideRight - Pull the two slide motors out of the hardware map, using the
driver hub configuration names defined in PedroConstants, and assign them to the fields above.
*/
liftSlideLeft = hardwareMap.get(DcMotorEx.class, LIFT_SLIDE_LEFT_MOTOR);
liftSlideRight = hardwareMap.get(DcMotorEx.class, LIFT_SLIDE_RIGHT_MOTOR);
}
public void init() {
/*
Initialize both motors with the following settings (assuming the slide starts at its lowest position):
- Reset the encoders to zero
- Brake when zero power is applied, so the slide holds its position instead of back-driving under gravity
- Run without the built-in velocity controller (RUN_WITHOUT_ENCODER). We still read the encoder
  positions; we just command raw motor power ourselves instead of letting the hub's velocity control use them.
*/
liftSlideLeft.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
liftSlideLeft.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);
liftSlideLeft.setMode(DcMotor.RunMode.RUN_WITHOUT_ENCODER);
liftSlideRight.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
liftSlideRight.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);
liftSlideRight.setMode(DcMotor.RunMode.RUN_WITHOUT_ENCODER);
}
/*
update() asks calculatePower() for a fresh motor power based on the current target position (in
encoder ticks) and applies it to both motors.
It runs every loop iteration: the farther the slide is from the target, the larger the commanded
power, and the power tapers toward zero as the slide closes in on the target.
*/
public void update() {
double power = calculatePower();
liftSlideLeft.setPower(power);
liftSlideRight.setPower(power);
}
/*
Calculating power - We compute the proportional, integral, and derivative terms of the closed-loop
                    error and sum them into a single motor power.
For more information, please visit:
- Introduction to Closed Loop System:
- https://www.ctrlaltftc.com/introduction-to-closed-loop-control
- The PID controller:
- https://www.ctrlaltftc.com/the-pid-controller
*/
private double calculatePower() {
double error = getTargetPosition() - liftSlideLeft.getCurrentPosition();
integralSum += error * timer.seconds();
double derivative = (error - lastError) / timer.seconds();
lastError = error;
timer.reset();
return (error * kp) + (derivative * kd) + (integralSum * ki);
}
public void toLowBucketPosition() {
setTargetPosition(1500);
}
public void toHighBucketPosition() {
setTargetPosition(3000);
}
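A minimal sketch of how a TeleOp might drive this new subsystem; the OpMode name, group, and gamepad button mapping below are illustrative assumptions rather than part of this change set:

package org.firstinspires.ftc.teamcode;

import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;

import org.firstinspires.ftc.teamcode.subsystem.DualMotorSliderSubsystem;

@TeleOp(name = "Dual Slide Sketch", group = "Development")
public class DualSlideSketchTeleOp extends OpMode {

    private DualMotorSliderSubsystem slides;

    @Override
    public void init() {
        // Construct against the hardware map, then apply the encoder-reset / brake / raw-power settings.
        slides = new DualMotorSliderSubsystem(hardwareMap);
        slides.init();
    }

    @Override
    public void loop() {
        // Button mapping is an assumption for illustration only.
        if (gamepad2.y) {
            slides.toHighBucketPosition();   // target 3000 ticks
        } else if (gamepad2.a) {
            slides.toLowBucketPosition();    // target 1500 ticks
        } else if (gamepad2.b) {
            slides.setTargetPosition(0);     // back down to the reset position
        }

        // Must be called every loop so the proportional controller keeps recomputing motor power.
        slides.update();
    }
}

Because update() recalculates power from the error on every pass, no extra waiting logic is needed; the slides simply converge on whatever target was set last.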
}