232 Commits

Author SHA1 Message Date
18470fe415 Getting parking to work again as path 6b. Original path during competition was not working properly. 2025-02-08 15:20:32 -08:00
933d33bf43 Autoclaw added 2025-02-06 16:51:35 -08:00
2974904109 Changes that covered the hang and wrist touching bar during auto 2025-02-06 16:50:13 -08:00
47facdde5e Xander's dualslides changes 2025-02-04 16:59:54 -08:00
9618bb7b29 Changed so it cant do bad things 2025-02-04 10:42:39 -08:00
15c561cd69 Merge remote-tracking branch 'origin/branch-swiss-cheese' into branch-swiss-cheese
# Conflicts:
#	TeamCode/src/main/java/org/firstinspires/ftc/teamcode/cometbots/CometBotTeleOpDevelopment.java
#	TeamCode/src/main/java/org/firstinspires/ftc/teamcode/configs/RobotConstants.java
2025-02-03 10:03:59 -08:00
3bcde94416 commit fixed arm bugs 2025-02-02 15:05:36 -08:00
1831b6621c Commit fixed wrist floor/pickup toggle and eliminated slide creeping error 2025-02-02 15:04:17 -08:00
b0db84a61c Hang Successful 2025-01-31 13:53:10 -08:00
b5c7379e00 Merge remote-tracking branch 'origin/branch-swiss-cheese' into branch-swiss-cheese 2025-01-30 15:46:40 -08:00
440a57dbf4 Hang Successful 2025-01-30 15:46:31 -08:00
7900c95e82 Hang Successful 2025-01-30 15:45:36 -08:00
7dda91af9c Merge remote-tracking branch 'origin/branch-swiss-cheese' into branch-swiss-cheese 2025-01-30 15:38:52 -08:00
78195ed0f6 Specimen States written 2025-01-30 15:38:36 -08:00
884e232b2b fixed bug 2025-01-29 20:44:57 -08:00
dd01c4ce1a Commit Hanging Function in the new TeleOp 2025-01-29 19:09:28 -08:00
edff580e84 Specimen States written 2025-01-29 15:06:38 -08:00
f8f83a1228 Merge remote-tracking branch 'origin/branch-swiss-cheese' into branch-swiss-cheese 2025-01-29 12:40:29 -08:00
f70213af27 hang test 2025-01-28 20:28:25 -08:00
7376c79cdd Fixed floor position issue 2025-01-28 17:13:58 -08:00
44448d642d Added teleop code 2025-01-28 17:10:36 -08:00
373dfd849a We added teleop methods 2025-01-28 17:09:43 -08:00
90ff225bfa working drive code arm needs work 2025-01-27 16:52:16 -08:00
73c0f137ad Working three samples in bucket auto with touching low bar park 2025-01-27 15:53:21 -08:00
5932d44350 Almost perfect, just need to get path 6 working properly 2025-01-26 12:51:21 -08:00
a36f40be45 Added new paths for autonomous 2025-01-26 12:32:43 -08:00
fe44a38cdd Merge remote-tracking branch 'origin/branch-swiss-cheese' into branch-swiss-cheese 2025-01-23 17:47:20 -08:00
f03515a169 Added new paths for autonomous 2025-01-23 17:47:00 -08:00
7b373b7c26 unfinished hang changes 2025-01-23 17:22:36 -08:00
7dcdde3b49 Merge remote-tracking branch 'origin/branch-swiss-cheese' into branch-swiss-cheese 2025-01-23 16:41:21 -08:00
3f08d9b00b fix bug 2025-01-23 16:40:52 -08:00
90355add55 new edit 2025-01-23 16:28:44 -08:00
fed445b171 Fixed floor position issue 2025-01-23 16:28:30 -08:00
59f06440d3 Merge remote-tracking branch 'origin/branch-swiss-cheese' into branch-swiss-cheese 2025-01-23 16:18:42 -08:00
939a36e138 Fixed floor position issue 2025-01-23 16:18:24 -08:00
b14e91b094 Commit Hang Subsystem new 2025-01-23 16:12:59 -08:00
70d0a17d75 Commit Hang Subsystem 2025-01-23 15:47:35 -08:00
2be683701b Merge remote-tracking branch 'origin/branch-swiss-cheese' into branch-swiss-cheese
# Conflicts:
#	TeamCode/src/main/java/org/firstinspires/ftc/teamcode/subsystem/HangMotorSubsystem.java
2025-01-23 15:46:12 -08:00
52e21fd468 Commit Hang Subsystem 2025-01-23 15:45:20 -08:00
839837c844 Commit Hang Subsystem 2025-01-23 10:52:56 -08:00
71b91fa3ff random 2025-01-22 12:31:31 -08:00
a08dd82de2 commit new states for reverse drops 2025-01-22 12:16:28 -08:00
2cb8ce41dd Path two working to pick up 1st specimen 2025-01-21 17:10:30 -08:00
93ff65ee53 Path one, backwards, works as expected 2025-01-21 16:32:18 -08:00
e1be70c23f Need path tuning high basket auto path 1 (I need oranges) 2025-01-20 12:41:48 -08:00
2a1513a2ba Commit Init Positions 2025-01-20 09:39:50 -08:00
172c6659dc Fixed claw Bug 2025-01-20 09:02:52 -08:00
4975e0f5ca Fixed States 2025-01-20 08:51:42 -08:00
1a878eea1c Updated Blueberry Specimen Grabbing code and new values. Reprogrammed a few servos also and I haven't tweaked the code yet. The Blueberry Specimen Grab is UNTESTED. CAUTION 2025-01-16 17:07:41 -08:00
8c6ce96ae4 New TeleOp 2025-01-16 08:01:31 -08:00
39e416e35a Commit dumb change 2024-12-29 11:28:27 -08:00
86ba993794 Commit working arm tests 2024-12-27 10:21:45 -08:00
8814dc906d Updating name 2024-12-27 02:24:51 -08:00
ccd7b55bc1 Fine tuning values 2024-12-27 02:24:41 -08:00
1ee5645dd1 Working wrist, for the most part 2024-12-27 02:24:18 -08:00
e26f0f0127 Delete dupe'd file 2024-12-27 02:00:39 -08:00
e85c503b5f Updated Follower to use PedroConstants 2024-12-27 02:00:27 -08:00
349ba28dc1 Fixed typo 2024-12-27 01:58:14 -08:00
4a57ed6bc3 Downgraded AGP to a compatible version 2024-12-27 01:57:39 -08:00
81743521d1 Thumb is a work in progress 2024-12-27 01:55:11 -08:00
d890699ef5 Fixed open/closed values for claw 2024-12-27 01:38:39 -08:00
95f20096b9 Version 1 of claws without actions nor thumb 2024-12-27 01:37:31 -08:00
1d8b44d453 Merge remote-tracking branch 'pedro-qs/master' into branch-silver-14493-v2
# Conflicts:
#	FtcRobotController/src/main/AndroidManifest.xml
#	FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptAprilTagLocalization.java
#	README.md
#	TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/LOCALIZATION.md
#	TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/OVERVIEW.md
#	TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/TUNING.md
#	TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/follower/DriveVectorScaler.java
#	TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/follower/Follower.java
#	TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/Localizer.java
#	TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/Pose.java
#	TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/PoseUpdater.java
#	TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/localizers/ThreeWheelLocalizer.java
#	TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/tuning/LateralTuner.java
#	TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/tuning/LocalizationTest.java
#	TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/pathGeneration/MathFunctions.java
#	TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/pathGeneration/PathBuilder.java
#	TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/pathGeneration/Point.java
#	TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/tuning/FollowerConstants.java
#	TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/tuning/ForwardVelocityTuner.java
#	TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/tuning/ForwardZeroPowerAccelerationTuner.java
#	TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/tuning/LateralZeroPowerAccelerationTuner.java
#	TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/tuning/StrafeVelocityTuner.java
#	TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/util/Drawing.java
#	build.dependencies.gradle
2024-12-26 18:31:46 -08:00
047d0fa3c3 Dual Motor Slide Subsystem 2024-12-25 15:09:39 -08:00
c63319f9c4 Simplification of class files 2024-12-25 15:08:36 -08:00
c824580b33 Moving class to test package 2024-12-25 15:07:48 -08:00
1180cb1035 Merge pull request #29 from junkjunk123/patch-3
Update Follower.java
2024-12-24 16:29:48 -05:00
e27e884650 Merge pull request #28 from Invicta8110/master
Merge FTC SDK v10.1.1
2024-12-24 16:29:10 -05:00
883906885b Fixed imports and static names 2024-12-23 16:32:53 -08:00
5595fcccd4 Removing unnecessary files 2024-12-23 16:32:21 -08:00
81514f34b4 Update Follower.java
Made the follower optionally take in a second parameter for the localizer
2024-12-23 09:35:30 -08:00
917c794ef6 Merge remote-tracking branch 'upstream/master'
# Conflicts:
#	FtcRobotController/src/main/AndroidManifest.xml
#	README.md
#	build.dependencies.gradle
2024-12-23 11:43:03 -05:00
06431bb98f Pinpoint is fixed Huzzah!!!
Update PinpointLocalizer.java
2024-12-22 23:59:33 -05:00
11bad4df76 Update PinpointLocalizer.java 2024-12-22 18:56:55 -08:00
75a2e31f57 Update PinpointLocalizer.java 2024-12-22 18:53:02 -08:00
4d6aafc1dd Update PinpointLocalizer.java 2024-12-22 18:51:46 -08:00
61f0b8afe9 Update TwoWheelPinpointIMULocalizer.java
Makes position of dead wheels less confusing
2024-12-21 16:24:07 -06:00
07e903d148 Update README.md 2024-12-20 20:05:23 -06:00
a2cb7981ca Merge pull request #26 from junkjunk123/patch-1 2024-12-19 12:53:48 -08:00
db79dc38dc Update TwoWheelPinpointIMULocalizer.java 2024-12-19 12:52:26 -08:00
38856c90dc Update TwoWheelPinpointIMULocalizer.java 2024-12-19 12:48:00 -08:00
3e79d86443 Finalize tunings 2024-12-18 20:42:14 -08:00
c9ffd4f061 Added driver hub config naming 2024-12-17 21:35:16 -08:00
021dfa7222 Added lift configurations 2024-12-17 21:34:49 -08:00
233b177cf6 Removing more files to keep things light for this branch 2024-12-17 21:32:10 -08:00
0ab402af0f Lightening the load and configured the robot with appropriate values (encoders/motors) 2024-12-17 19:07:34 -08:00
e4f0447312 changing documentation to match FollowerConstants motor name/direction update 2024-12-15 18:35:23 -05:00
f8ae3ccf4b Merge pull request #17 from monkbroc/motor-direction
Extract motor direction to constants so they are set in one place
2024-12-15 18:24:20 -05:00
9264472cf9 Merge remote-tracking branch 'origin/master' 2024-12-15 18:15:29 -05:00
45da3275da makes limiting the max power better 2024-12-15 18:15:18 -05:00
c04de0428e makes limiting the max power better 2024-12-15 18:14:56 -05:00
a2d896b67c Merge pull request #22 from Logan-Nash/master
Fixed loop times on OTOS localizer. Updated the pinpoint to the lates…
2024-12-10 10:42:11 -05:00
f4942715d9 Fixed loop times on OTOS localizer. Updated the pinpoint to the latest driver 2024-12-10 10:40:22 -05:00
aa496b8237 Ollie's work committed 2024-12-08 12:48:30 -08:00
d34d56d567 Merge pull request #20 from junkjunk123/master
Patches to the PinpointLocalizer
2024-12-06 13:44:57 -05:00
9330c22213 Update PinpointLocalizer.java 2024-12-06 09:36:06 -08:00
7471de24d2 Update Follower.java 2024-12-06 09:20:27 -08:00
8a55b00c42 Update Localizer.java 2024-12-06 09:19:48 -08:00
7bc4dbbd34 Update PoseUpdater.java 2024-12-06 09:18:37 -08:00
fd5d6b716b Update PinpointLocalizer.java 2024-12-06 09:17:29 -08:00
bdc0de4eeb Merge pull request #19 from Logan-Nash/master
Commented out PinpointLocalizer.java
2024-12-06 10:46:03 -05:00
460639127b Commented out PinpointLocalizer.java , Too many issues for now. will uncomment when proven test is complete 2024-12-06 10:41:02 -05:00
b0fc4bb0ad Merge pull request #18 from junkjunk123/master
Start position heading fix on TwoWheelPinpointIMU
2024-12-05 21:52:06 -05:00
6b601b9dd4 Start position heading fix on TwoWheelPinpointIMU 2024-12-05 18:08:59 -08:00
a0b3b76210 Extract motor direction to constants so they are set in one place 2024-11-29 10:05:08 -05:00
58fd8d4a56 Merge pull request #16 from BaronClaps/master
Two Wheel & Pinpoint IMU Localizer
2024-11-29 02:53:22 -05:00
22dba85a5f Update TwoWheelPinpointIMULocalizer.java 2024-11-28 23:16:24 -06:00
3b47dc4269 Update TwoWheelPinpointIMULocalizer.java 2024-11-28 23:15:08 -06:00
ea4dd2e9c1 Merge branch 'AnyiLin:master' into master 2024-11-28 20:49:44 -06:00
58ab21d86d TwoWheel + PinpointIMU Localizer 2024-11-28 20:48:25 -06:00
fdc7a9f979 makes it brake after ending the velocity tuner 2024-11-28 12:38:43 -05:00
a4ab959de0 Merge pull request #14 from BaronClaps/master
Make distance to be consistently 48 inches
2024-11-28 02:50:41 -05:00
f14c3f0873 Make distance to be 48 inches on push tests and velocity 2024-11-27 12:34:57 -06:00
9765f23493 Merge pull request #13 from Iris-TheRainbow/patch-1
Correct mistakes in roadrunner description
2024-11-26 04:55:32 -05:00
659a22b33e fix total heading issue in PinpointLocalizer 2024-11-26 04:54:23 -05:00
6d4cd0f2f5 Merge remote-tracking branch 'origin/master'
# Conflicts:
#	TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/localization/localizers/DriveEncoderLocalizer.java
2024-11-26 01:28:29 -05:00
354c822b92 drive encoder direction fix 2024-11-26 01:27:59 -05:00
97bb1faf3b drive encoder direction fix 2024-11-26 00:18:34 -05:00
6441222924 Correct mistakes in roadrunner description 2024-11-25 19:13:16 -06:00
17f0ebaea5 fixed velocity calculations in localizers 2024-11-22 21:58:40 -05:00
fb056103d9 fix velocity inaccuracy in TUNING.md 2024-11-22 14:24:38 -05:00
40f7b3391f added a reminder to reverse motors properly in TUNING.MD 2024-11-22 03:13:25 -05:00
089fc3ae68 moved velocity calculations to the localizers 2024-11-21 17:13:30 -05:00
9711a268e5 hopefully fixed the PinpointLocalizer 2024-11-21 17:09:44 -05:00
437635838b added functionality to IMU methods in PoseUpdater 2024-11-21 15:35:42 -05:00
ea381fa92c realigning class comment on PinpointLocalizer 2024-11-21 02:06:59 -05:00
c8ba9a09db Merge pull request #11 from Marlstar/master
Clarifications in the localization and tuning processes
2024-11-21 01:56:06 -05:00
775e7ffbc8 Clarify translational PIDF tuning process
Some quirks in both the process of tuning the translational PIDF and the documentation of it led to confusion for myself and others.
- Noted that the telemetry message that the robot will move forward and backward can be ignored for the moment
- Noted that the robot should only move when pushed
- Noted that the robot will only correct to a line in the original forward direction, not to the starting point
2024-11-17 14:42:31 +11:00
1056fe0fc6 Rotate robot diagrams in localizer files to reduce confusion 2024-11-17 13:36:33 +10:00
66f3339e26 Working Pre loaded auto! Can score 11 points consistently! 2024-11-14 17:10:46 -08:00
ad0a8d3374 Merge remote-tracking branch 'origin/branch-silver-14493' into branch-silver-14493 2024-11-14 16:23:46 -08:00
dc71eb4317 Feature where driver can override centricity (robot vs field). 2024-11-13 15:09:59 -08:00
94144780b8 Tentative fix for robot/runBlocking problem as per issue #2 2024-11-13 09:13:24 -08:00
a362d2e004 Massive upgrade and shift of files 2024-11-12 23:16:23 -08:00
2008c3cd88 Working something, I don't know 2024-11-12 16:55:35 -08:00
ee16f8e657 Merge pull request #1250 from FIRST-Tech-Challenge/20241102-092223-release-candidate
FtcRobotController v10.1.1
2024-11-06 13:31:57 +09:00
c5be3cd932 Silver Branch Code 2024-11-03 09:13:37 -08:00
9f11128c61 FtcRobotController v10.1 2024-11-02 10:16:56 +09:00
7d83b9c254 Add sample cometbot package 2024-10-31 15:37:09 -07:00
04c02fb36e Merge pull request #10 from TBHGodPro/master
Multiple usability patches
2024-10-28 19:56:48 -04:00
2530445d0d Multiple usability patches 2024-10-28 17:39:03 -05:00
85b8daed3d Merge pull request #9 from TBHGodPro/master
Improve OTOSLocalizer and PinpointLocalizer performance, add rotate Pose method
2024-10-27 16:23:53 -04:00
4e18dae336 Improve OTOSLocalizer and PinpointLocalizer performance 2024-10-26 10:57:10 -05:00
6830a82403 Added fixes to external odometry processors.
Signed-off-by: Logan-Nash <logannash@Mac.attlocal.net>
2024-10-25 11:54:29 -04:00
5f50d053c5 Merge remote-tracking branch 'origin/branch-silver-14493' into branch-silver-14493
# Conflicts:
#	TeamCode/src/main/java/org/firstinspires/ftc/teamcode/pedroPathing/tuning/FollowerConstants.java
2024-10-21 21:57:45 -07:00
c5f33af419 small fix to PinpointLocalizer 2024-10-17 21:56:08 -04:00
1b27bab64b Merge remote-tracking branch 'origin/master' 2024-10-17 14:57:05 -04:00
f5ac5ea8e9 Added Pinpoint Localization! 2024-10-17 14:09:35 -04:00
0cfb57c643 Merge remote-tracking branch 'origin/branch-silver-14493' into branch-silver-14493 2024-10-17 11:07:25 -07:00
50db1f9175 Add new files (2nd try) 2024-10-17 11:07:10 -07:00
552bb3e25a Add new files 2024-10-17 11:06:13 -07:00
c1076a832c Rebased changes 2024-10-17 11:05:12 -07:00
7bf8b0c357 Add files 2024-10-17 11:04:02 -07:00
adfab3e8af Add sample code 2024-10-15 15:44:49 -07:00
0f7ea50907 Updated constants for silver and it, somehow, works 2024-10-15 15:43:53 -07:00
239f168540 Retuned to success, PIDF (P = .05) and translationalPIDFeedForward is back to 0 2024-10-15 13:59:02 -07:00
e0a96df549 optimized OTOSLocalizer thanks to j5155 2024-10-10 03:46:24 -04:00
f2bcdcc55a Add constants 2024-10-01 17:05:54 -07:00
e08aac773d Static entries for arm servos 2024-10-01 15:45:02 -07:00
3950a83ac1 Added encoder naming and usage 2024-10-01 10:19:40 -07:00
552ff3f339 Updated to 10.1 2024-09-30 22:37:16 -07:00
4eef485dab Re-wired and reconfigured 2024-09-30 20:39:24 -07:00
ce3ae6c03b OTOSLocalizer fix 2024-09-29 00:08:57 -04:00
6e82dc98df version 10.1 sdk 2024-09-21 02:52:11 -04:00
91d5984af7 FtcRobotController v10.1 2024-09-21 02:51:21 -04:00
6af9bb6534 Merge pull request #1074 from FIRST-Tech-Challenge/20240919-122750-release-candidate
FtcRobotController v10.1
2024-09-20 11:54:13 -07:00
156423422c Latest configurations for black competition robot 2024-09-19 17:17:53 -07:00
054017df61 FtcRobotController v10.1 2024-09-19 13:38:50 -07:00
e534f46efb Added IMU direction 2024-09-17 18:55:37 -07:00
fce14b3753 Added more variables to Constants file 2024-09-17 18:46:26 -07:00
4ed1c8c615 Tuned localizer for Competition Robot 2024-09-17 17:16:31 -07:00
2594ff79ca Update values for Competition robot 2024-09-17 15:56:40 -07:00
fa49b48441 Incorporate static class variables for ease of modification 2024-09-16 07:48:17 -07:00
d9d0f11de7 A bit of format changes 2024-09-14 12:19:44 -07:00
4dc7b493fa Added more static entries for ease of configuration 2024-09-14 12:16:06 -07:00
a149909e82 Added more useful links 2024-09-14 12:15:49 -07:00
dbe9c76ee6 Renamed README markdown 2024-09-14 12:15:41 -07:00
e72326506f small typo 2024-09-14 03:14:52 -04:00
d41f368fe8 Extract values so it's easier to manipulate 2024-09-13 15:44:05 -07:00
51c3e0bd93 Remove unused files for Pedro Pathing 2024-09-13 15:17:18 -07:00
ac1be6387b Commit instance of Pedro Pathing 2024-09-13 15:01:04 -07:00
315e3b4240 Add sample file to debug on-board IMU 2024-09-13 13:40:21 -07:00
fb570d6be8 Static-ization of variables that are modified constantly 2024-09-13 13:36:51 -07:00
a0126ba1a2 A bit of cleanup 2024-09-13 13:21:47 -07:00
9553332b3e v10.0 sdk 2024-09-10 14:39:24 -04:00
151ead49d2 v10.0 sdk 2024-09-10 14:25:57 -04:00
f91cf1a8bf you can now reset the IMU! 2024-09-07 17:36:52 -04:00
442c867b93 Update README.md 2024-09-07 09:38:48 -07:00
f75e498a55 Merge pull request #1038 from FIRST-Tech-Challenge/20240828-111152-release-candidate
FtcRobotController v10.0
2024-09-07 09:19:53 -07:00
2d8df47c59 pathEndTimeoutConstraint is in milliseconds, not seconds 2024-09-06 01:44:14 -04:00
cfc6964799 can now use SparkFunOTOSCorrected 2024-09-04 20:57:29 -04:00
fb7f160a9f Updated motor/encoder names with that of SCDS Robot configuration 2024-08-29 12:24:10 -07:00
7158b61d94 FtcRobotController v10.0 2024-08-28 12:49:37 -07:00
425c001a1b github name update 2024-08-27 20:24:23 -04:00
74422baed4 oopsie 2024-08-27 20:06:12 -04:00
94bad19674 made the robot odo pod diagram more readable 2024-08-27 10:05:53 -04:00
93db7dcdfa changing motor configuration is easier and readme updates 2024-08-27 01:32:09 -04:00
13e503c730 centripetal correction oops 2024-08-13 01:04:21 -04:00
87f42407f7 centripetal correction 2024-08-11 15:50:08 -04:00
2c28cda121 new path visualizer by team 16166 watt's up! 2024-08-09 17:59:13 -04:00
e849fd8e1e simplifying the PID system 2024-08-07 21:39:27 -04:00
4a669f770f simplifying the PID system 2024-08-06 23:00:02 -04:00
22d38b1289 hopefully fixes any issues with the class 2024-07-31 15:23:57 -04:00
1703f88159 better teleop drive robot/field centric functionality 2024-07-30 00:33:55 -04:00
b7ddb3465a teleop enhancements fix 2024-07-29 22:15:07 -04:00
bee9f02577 teleop enhancements fix 2024-07-26 22:12:02 -04:00
a823aa7af7 small misc changes 2024-07-21 22:52:25 -04:00
70cfd3be44 updated readmes for the drive pid changes and the OTOS localizer 2024-07-21 12:54:47 -04:00
09ae88b27e refactored the localizer files and added an OTOS localizer 2024-07-21 00:14:20 -04:00
7b8adf25bf work mostly done on kalman filter 2024-07-20 00:56:24 -04:00
67f56a9931 updated to the 9.2 FTC SDK and working on adding the kalman filter and filtered pidf to the drive pid 2024-07-18 22:34:39 -04:00
14b5f267fb Added new localizer that uses three tracking wheels, with the IMU for most heading tracking. Also fixed the math for the teleop centripetal force correction with help from @junkjunk123 on Discord 2024-07-13 23:09:55 -04:00
ce2009b8b4 Create static.yml 2024-06-08 02:52:42 -04:00
41c09e0103 localizer readme done 2024-05-18 16:36:17 -04:00
bfab6dc7d0 more fixes 2024-05-17 10:04:28 -04:00
252cc304ad minor fixes and todo removed 2024-05-14 09:33:53 -04:00
0f304e912d fixed gradle 2024-05-14 09:19:14 -04:00
52f3cb91ae disabled road runner localizer adapters to reduce number of gradle dependencies 2024-05-14 08:39:48 -04:00
168537cb28 added comments to all new files and increased FTC Dashboard field drawing functionality, will add a readme for the localization section 2024-05-13 23:15:20 -04:00
ccc74c8120 uncommented Road Runner adapter files 2024-05-09 23:12:32 -04:00
2468dbb58a Adapter for Road Runner added for easier transition to Pedro Pathing localizer, as well as adding previously omitted files 2024-05-09 23:00:12 -04:00
55b4b6b7f1 fixed centripetal force correction issues and added localizer stuff. an interface for the roadrunner localizer will come in the next commit 2024-05-09 21:40:36 -04:00
bdf183fe62 fixed path parameter issues 2024-05-05 20:32:06 -04:00
f8fd55da43 oops, should be good now 2024-04-29 21:24:29 -04:00
99b02f52e9 yay 2024-04-28 22:10:23 -04:00
3b92cb79b2 Merge pull request #1 from 21229QualityControl/master
Make the build successful
2024-04-28 20:32:30 -04:00
28c3ee97fe Make it successfully build 2024-04-28 17:24:54 -07:00
2ba5639bbc centripetal force correction fix 2024-03-30 22:47:57 -04:00
3af80e243b centripetal force correction fix 2024-03-29 16:58:38 -04:00
61615feacf minor change to tuning readme 2024-03-25 20:36:00 -04:00
f2c524b59f first commit 2024-03-24 23:37:27 -04:00
4bb3a5ad61 first commit 2024-03-24 23:30:39 -04:00
161 changed files with 16067 additions and 1880 deletions

43
.github/workflows/static.yml vendored Normal file
View File

@ -0,0 +1,43 @@
# Simple workflow for deploying static content to GitHub Pages
name: Deploy static content to Pages
on:
# Runs on pushes targeting the default branch
push:
branches: ["master"]
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
permissions:
contents: read
pages: write
id-token: write
# Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued.
# However, do NOT cancel in-progress runs as we want to allow these production deployments to complete.
concurrency:
group: "pages"
cancel-in-progress: false
jobs:
# Single deploy job since we're just deploying
deploy:
environment:
name: github-pages
url: ${{ steps.deployment.outputs.page_url }}
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Pages
uses: actions/configure-pages@v5
- name: Upload artifact
uses: actions/upload-pages-artifact@v3
with:
# Upload entire repository
path: '.'
- name: Deploy to GitHub Pages
id: deployment
uses: actions/deploy-pages@v4

5
.gitignore vendored
View File

@ -78,7 +78,4 @@ lint/intermediates/
lint/generated/
lint/outputs/
lint/tmp/
# lint/reports/
.DS_Store
/.idea
# lint/reports/

View File

@ -14,7 +14,11 @@ android {
buildConfigField "String", "APP_BUILD_TIME", '"' + (new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ", Locale.ROOT).format(new Date())) + '"'
}
compileSdkVersion 29
buildFeatures {
buildConfig = true
}
compileSdkVersion 30
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8

View File

@ -1,8 +1,8 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:versionCode="54"
android:versionName="9.2">
android:versionCode="57"
android:versionName="10.1.1">
<uses-permission android:name="android.permission.RECEIVE_BOOT_COMPLETED" />

View File

@ -99,7 +99,7 @@ public class BasicOmniOpMode_Linear extends LinearOpMode {
rightFrontDrive.setDirection(DcMotor.Direction.FORWARD);
rightBackDrive.setDirection(DcMotor.Direction.FORWARD);
// Wait for the game to start (driver presses PLAY)
// Wait for the game to start (driver presses START)
telemetry.addData("Status", "Initialized");
telemetry.update();

View File

@ -83,14 +83,14 @@ public class BasicOpMode_Iterative extends OpMode
}
/*
* Code to run REPEATEDLY after the driver hits INIT, but before they hit PLAY
* Code to run REPEATEDLY after the driver hits INIT, but before they hit START
*/
@Override
public void init_loop() {
}
/*
* Code to run ONCE when the driver hits PLAY
* Code to run ONCE when the driver hits START
*/
@Override
public void start() {
@ -98,7 +98,7 @@ public class BasicOpMode_Iterative extends OpMode
}
/*
* Code to run REPEATEDLY after the driver hits PLAY but before they hit STOP
* Code to run REPEATEDLY after the driver hits START but before they hit STOP
*/
@Override
public void loop() {

View File

@ -76,7 +76,7 @@ public class BasicOpMode_Linear extends LinearOpMode {
leftDrive.setDirection(DcMotor.Direction.REVERSE);
rightDrive.setDirection(DcMotor.Direction.FORWARD);
// Wait for the game to start (driver presses PLAY)
// Wait for the game to start (driver presses START)
waitForStart();
runtime.reset();

View File

@ -88,7 +88,7 @@ public class ConceptAprilTag extends LinearOpMode {
// Wait for the DS start button to be touched.
telemetry.addData("DS preview on/off", "3 dots, Camera Stream");
telemetry.addData(">", "Touch Play to start OpMode");
telemetry.addData(">", "Touch START to start OpMode");
telemetry.update();
waitForStart();

View File

@ -84,7 +84,7 @@ public class ConceptAprilTagEasy extends LinearOpMode {
// Wait for the DS start button to be touched.
telemetry.addData("DS preview on/off", "3 dots, Camera Stream");
telemetry.addData(">", "Touch Play to start OpMode");
telemetry.addData(">", "Touch START to start OpMode");
telemetry.update();
waitForStart();

View File

@ -0,0 +1,247 @@
/* Copyright (c) 2024 Dryw Wade. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.robotcontroller.external.samples;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import org.firstinspires.ftc.robotcore.external.hardware.camera.BuiltinCameraDirection;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.robotcore.external.navigation.DistanceUnit;
import org.firstinspires.ftc.robotcore.external.navigation.Position;
import org.firstinspires.ftc.robotcore.external.navigation.YawPitchRollAngles;
import org.firstinspires.ftc.vision.VisionPortal;
import org.firstinspires.ftc.vision.apriltag.AprilTagDetection;
import org.firstinspires.ftc.vision.apriltag.AprilTagProcessor;
import java.util.List;
/*
* This OpMode illustrates the basics of AprilTag based localization.
*
* For an introduction to AprilTags, see the FTC-DOCS link below:
* https://ftc-docs.firstinspires.org/en/latest/apriltag/vision_portal/apriltag_intro/apriltag-intro.html
*
* In this sample, any visible tag ID will be detected and displayed, but only tags that are included in the default
* "TagLibrary" will be used to compute the robot's location and orientation. This default TagLibrary contains
* the current Season's AprilTags and a small set of "test Tags" in the high number range.
*
* When an AprilTag in the TagLibrary is detected, the SDK provides location and orientation of the robot, relative to the field origin.
* This information is provided in the "robotPose" member of the returned "detection".
*
* To learn about the Field Coordinate System that is defined for FTC (and used by this OpMode), see the FTC-DOCS link below:
* https://ftc-docs.firstinspires.org/en/latest/game_specific_resources/field_coordinate_system/field-coordinate-system.html
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
*/
@TeleOp(name = "Concept: AprilTag Localization", group = "Concept")
@Disabled
public class ConceptAprilTagLocalization extends LinearOpMode {
private static final boolean USE_WEBCAM = true; // true for webcam, false for phone camera
/**
* Variables to store the position and orientation of the camera on the robot. Setting these
* values requires a definition of the axes of the camera and robot:
*
* Camera axes:
* Origin location: Center of the lens
* Axes orientation: +x right, +y down, +z forward (from camera's perspective)
*
* Robot axes (this is typical, but you can define this however you want):
* Origin location: Center of the robot at field height
* Axes orientation: +x right, +y forward, +z upward
*
* Position:
* If all values are zero (no translation), that implies the camera is at the center of the
* robot. Suppose your camera is positioned 5 inches to the left, 7 inches forward, and 12
* inches above the ground - you would need to set the position to (-5, 7, 12).
*
* Orientation:
* If all values are zero (no rotation), that implies the camera is pointing straight up. In
* most cases, you'll need to set the pitch to -90 degrees (rotation about the x-axis), meaning
* the camera is horizontal. Use a yaw of 0 if the camera is pointing forwards, +90 degrees if
* it's pointing straight left, -90 degrees for straight right, etc. You can also set the roll
* to +/-90 degrees if it's vertical, or 180 degrees if it's upside-down.
*/
// NOTE(review): the trailing 0 in each constructor call appears to be an acquisition
// timestamp argument — confirm against the Position/YawPitchRollAngles API docs.
private Position cameraPosition = new Position(DistanceUnit.INCH,
0, 0, 0, 0);
private YawPitchRollAngles cameraOrientation = new YawPitchRollAngles(AngleUnit.DEGREES,
0, -90, 0, 0);
/**
* The variable to store our instance of the AprilTag processor.
*/
private AprilTagProcessor aprilTag;
/**
* The variable to store our instance of the vision portal.
*/
private VisionPortal visionPortal;
/**
* OpMode entry point: initializes the AprilTag processor and vision portal, then loops
* until STOP, pushing robot-pose telemetry for each detected tag. Gamepad1 dpad-down
* pauses camera streaming (to save CPU) and dpad-up resumes it.
*/
@Override
public void runOpMode() {
initAprilTag();
// Wait for the DS start button to be touched.
telemetry.addData("DS preview on/off", "3 dots, Camera Stream");
telemetry.addData(">", "Touch START to start OpMode");
telemetry.update();
waitForStart();
while (opModeIsActive()) {
telemetryAprilTag();
// Push telemetry to the Driver Station.
telemetry.update();
// Save CPU resources; can resume streaming when needed.
if (gamepad1.dpad_down) {
visionPortal.stopStreaming();
} else if (gamepad1.dpad_up) {
visionPortal.resumeStreaming();
}
// Share the CPU.
sleep(20);
}
// Save more CPU resources when camera is no longer needed.
visionPortal.close();
} // end method runOpMode()
/**
* Initialize the AprilTag processor (configured with the camera's pose on the robot)
* and build the VisionPortal that feeds camera frames to it.
*/
private void initAprilTag() {
// Create the AprilTag processor.
aprilTag = new AprilTagProcessor.Builder()
// The following default settings are available to un-comment and edit as needed.
//.setDrawAxes(false)
//.setDrawCubeProjection(false)
//.setDrawTagOutline(true)
//.setTagFamily(AprilTagProcessor.TagFamily.TAG_36h11)
//.setTagLibrary(AprilTagGameDatabase.getCenterStageTagLibrary())
//.setOutputUnits(DistanceUnit.INCH, AngleUnit.DEGREES)
// The camera pose set here is what lets the processor report robotPose (robot
// location on the field) rather than just the camera-relative tag pose.
.setCameraPose(cameraPosition, cameraOrientation)
// == CAMERA CALIBRATION ==
// If you do not manually specify calibration parameters, the SDK will attempt
// to load a predefined calibration for your camera.
//.setLensIntrinsics(578.272, 578.272, 402.145, 221.506)
// ... these parameters are fx, fy, cx, cy.
.build();
// Adjust Image Decimation to trade-off detection-range for detection-rate.
// eg: Some typical detection data using a Logitech C920 WebCam
// Decimation = 1 .. Detect 2" Tag from 10 feet away at 10 Frames per second
// Decimation = 2 .. Detect 2" Tag from 6 feet away at 22 Frames per second
// Decimation = 3 .. Detect 2" Tag from 4 feet away at 30 Frames Per Second (default)
// Decimation = 3 .. Detect 5" Tag from 10 feet away at 30 Frames Per Second (default)
// Note: Decimation can be changed on-the-fly to adapt during a match.
//aprilTag.setDecimation(3);
// Create the vision portal by using a builder.
VisionPortal.Builder builder = new VisionPortal.Builder();
// Set the camera (webcam vs. built-in RC phone camera).
if (USE_WEBCAM) {
builder.setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"));
} else {
builder.setCamera(BuiltinCameraDirection.BACK);
}
// Choose a camera resolution. Not all cameras support all resolutions.
//builder.setCameraResolution(new Size(640, 480));
// Enable the RC preview (LiveView). Set "false" to omit camera monitoring.
//builder.enableLiveView(true);
// Set the stream format; MJPEG uses less bandwidth than default YUY2.
//builder.setStreamFormat(VisionPortal.StreamFormat.YUY2);
// Choose whether or not LiveView stops if no processors are enabled.
// If set "true", monitor shows solid orange screen if no processors enabled.
// If set "false", monitor shows camera view without annotations.
//builder.setAutoStopLiveView(false);
// Set and enable the processor.
builder.addProcessor(aprilTag);
// Build the Vision Portal, using the above settings.
visionPortal = builder.build();
// Disable or re-enable the aprilTag processor at any time.
//visionPortal.setProcessorEnabled(aprilTag, true);
} // end method initAprilTag()
/**
* Add telemetry about AprilTag detections. For tags with library metadata the
* field-relative robot pose (XYZ inches, Pitch/Roll/Yaw degrees) is shown; for
* unknown tags only the tag ID and image-center pixel location are available.
* Assumes initAprilTag() has already run (aprilTag is non-null).
*/
private void telemetryAprilTag() {
List<AprilTagDetection> currentDetections = aprilTag.getDetections();
telemetry.addData("# AprilTags Detected", currentDetections.size());
// Step through the list of detections and display info for each one.
for (AprilTagDetection detection : currentDetections) {
// robotPose is only populated when the tag has known metadata (field location).
if (detection.metadata != null) {
telemetry.addLine(String.format("\n==== (ID %d) %s", detection.id, detection.metadata.name));
telemetry.addLine(String.format("XYZ %6.1f %6.1f %6.1f (inch)",
detection.robotPose.getPosition().x,
detection.robotPose.getPosition().y,
detection.robotPose.getPosition().z));
telemetry.addLine(String.format("PRY %6.1f %6.1f %6.1f (deg)",
detection.robotPose.getOrientation().getPitch(AngleUnit.DEGREES),
detection.robotPose.getOrientation().getRoll(AngleUnit.DEGREES),
detection.robotPose.getOrientation().getYaw(AngleUnit.DEGREES)));
} else {
telemetry.addLine(String.format("\n==== (ID %d) Unknown", detection.id));
telemetry.addLine(String.format("Center %6.0f %6.0f (pixels)", detection.center.x, detection.center.y));
}
} // end for() loop
// Add "key" information to telemetry
telemetry.addLine("\nkey:\nXYZ = X (Right), Y (Forward), Z (Up) dist.");
telemetry.addLine("PRY = Pitch, Roll & Yaw (XYZ Rotation)");
} // end method telemetryAprilTag()
} // end class

View File

@ -98,7 +98,7 @@ public class ConceptAprilTagOptimizeExposure extends LinearOpMode
// Wait for the match to begin.
telemetry.addData("Camera preview on/off", "3 dots, Camera Stream");
telemetry.addData(">", "Touch Play to start OpMode");
telemetry.addData(">", "Touch START to start OpMode");
telemetry.update();
waitForStart();

View File

@ -77,7 +77,7 @@ public class ConceptAprilTagSwitchableCameras extends LinearOpMode {
// Wait for the DS start button to be touched.
telemetry.addData("DS preview on/off", "3 dots, Camera Stream");
telemetry.addData(">", "Touch Play to start OpMode");
telemetry.addData(">", "Touch START to start OpMode");
telemetry.update();
waitForStart();

View File

@ -83,7 +83,7 @@ public class ConceptExternalHardwareClass extends LinearOpMode {
robot.init();
// Send telemetry message to signify robot waiting;
// Wait for the game to start (driver presses PLAY)
// Wait for the game to start (driver presses START)
waitForStart();
// run until the end of the match (driver presses STOP)

View File

@ -0,0 +1,123 @@
package org.firstinspires.ftc.robotcontroller.external.samples;
/*
Copyright (c) 2021-24 Alan Smith
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted (subject to the limitations in the disclaimer below) provided that
the following conditions are met:
Redistributions of source code must retain the above copyright notice, this list
of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.
Neither the name of Alan Smith nor the names of its contributors may be used to
endorse or promote products derived from this software without specific prior
written permission.
NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
import android.graphics.Color;
import com.qualcomm.hardware.sparkfun.SparkFunLEDStick;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.util.Range;
/*
* This OpMode illustrates how to use the SparkFun QWIIC LED Strip
*
* This is a simple way to add a strip of 10 LEDs to your robot where you can set the color of each
* LED or the whole strip. This allows for driver feedback or even just fun ways to show your team
* colors.
*
* Why?
* Because more LEDs == more fun!!
*
* This OpMode assumes that the QWIIC LED Stick is attached to an I2C interface named "back_leds" in the robot configuration.
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list
*
* You can buy this product here: https://www.sparkfun.com/products/18354
* Don't forget to also buy this to make it easy to connect to your Control or Expansion Hub:
* https://www.sparkfun.com/products/25596
*/
@TeleOp(name = "Concept: LED Stick", group = "Concept")
@Disabled
public class ConceptLEDStick extends OpMode {
    // Endgame begins 30 seconds before the end of the 120-second match.
    private final static double ENDGAME_START_SECONDS = 120 - 30;

    private SparkFunLEDStick ledStick;
    private int brightness = 5;          // valid range is 0..31
    private boolean dpadUpWasPressed;    // previous-cycle dpad state, for edge detection
    private boolean dpadDownWasPressed;

    /** Map the LED stick and show solid green at the default brightness. */
    @Override
    public void init() {
        ledStick = hardwareMap.get(SparkFunLEDStick.class, "back_leds");
        ledStick.setBrightness(brightness);
        ledStick.setColor(Color.GREEN);
    }

    /** Restart the match clock so the endgame warning fires at the right time. */
    @Override
    public void start() {
        resetRuntime();
    }

    /**
     * Drive the LED colors from gamepad1 each cycle. Once endgame is reached the
     * stick flashes an alternating red/yellow pattern regardless of button input;
     * otherwise A = blue, B = red, left bumper = off, nothing held = green.
     * DPAD up/down nudges brightness by one step per press.
     */
    @Override
    public void loop() {
        telemetry.addLine("Hold the A button to turn blue");
        telemetry.addLine("Hold the B button to turn red");
        telemetry.addLine("Hold the left bumper to turn off");
        telemetry.addLine("Use DPAD Up/Down to change brightness");

        if (getRuntime() > ENDGAME_START_SECONDS) {
            // Alternate red/yellow across all ten LEDs as an endgame warning.
            int[] warningPattern = new int[10];
            for (int i = 0; i < warningPattern.length; i++) {
                warningPattern[i] = (i % 2 == 0) ? Color.RED : Color.YELLOW;
            }
            ledStick.setColors(warningPattern);
        } else if (gamepad1.a) {
            ledStick.setColor(Color.BLUE);
        } else if (gamepad1.b) {
            ledStick.setColor(Color.RED);
        } else if (gamepad1.left_bumper) {
            ledStick.turnAllOff();
        } else {
            ledStick.setColor(Color.GREEN);
        }

        // Rising-edge detection: one brightness step per button press, clamped to 0..31.
        int delta = 0;
        if (gamepad1.dpad_up && !dpadUpWasPressed) {
            delta = 1;
        } else if (gamepad1.dpad_down && !dpadDownWasPressed) {
            delta = -1;
        }
        if (delta != 0) {
            brightness = Range.clip(brightness + delta, 0, 31);
            ledStick.setBrightness(brightness);
        }
        telemetry.addData("Brightness", brightness);

        dpadDownWasPressed = gamepad1.dpad_down;
        dpadUpWasPressed = gamepad1.dpad_up;
    }
}

View File

@ -110,7 +110,7 @@ public class ConceptMotorBulkRead extends LinearOpMode {
ElapsedTime timer = new ElapsedTime();
telemetry.addData(">", "Press play to start tests");
telemetry.addData(">", "Press START to start tests");
telemetry.addData(">", "Test results will update for each access method.");
telemetry.update();
waitForStart();

View File

@ -53,7 +53,7 @@ public class ConceptNullOp extends OpMode {
/**
* This method will be called repeatedly during the period between when
* the init button is pressed and when the play button is pressed (or the
* the INIT button is pressed and when the START button is pressed (or the
* OpMode is stopped).
*/
@Override
@ -61,7 +61,7 @@ public class ConceptNullOp extends OpMode {
}
/**
* This method will be called once, when the play button is pressed.
* This method will be called once, when the START button is pressed.
*/
@Override
public void start() {
@ -70,7 +70,7 @@ public class ConceptNullOp extends OpMode {
/**
* This method will be called repeatedly during the period between when
* the play button is pressed and when the OpMode is stopped.
* the START button is pressed and when the OpMode is stopped.
*/
@Override
public void loop() {

View File

@ -0,0 +1,78 @@
/*
Copyright (c) 2024 Alan Smith
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted (subject to the limitations in the disclaimer below) provided that
the following conditions are met:
Redistributions of source code must retain the above copyright notice, this list
of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.
Neither the name of Alan Smith nor the names of its contributors may be used to
endorse or promote products derived from this software without specific prior
written permission.
NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.robotcontroller.external.samples;
/*
* This OpMode illustrates how to use the REV Digital Indicator
*
* This is a simple way to add the REV Digital Indicator which has a red and green LED.
* (and as you may remember, red + green = yellow so when they are both on you can get yellow)
*
* Why?
* You can use this to show information to the driver. For example, green might be that you can
* pick up more game elements and red would be that you already have the possession limit.
*
* This OpMode assumes that the REV Digital Indicator is setup as 2 Digital Channels named
* front_led_green and front_led_red. (the green should be the lower of the 2 channels it is plugged
* into and the red should be the higher)
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list
*
* You can buy this product here: https://www.revrobotics.com/rev-31-2010/
*/
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.LED;
@TeleOp(name = "Concept: RevLED", group = "Concept")
@Disabled
public class ConceptRevLED extends OpMode {
    LED frontLED_red;
    LED frontLED_green;

    /** Map the two digital channels of the REV Digital Indicator. */
    @Override
    public void init() {
        frontLED_green = hardwareMap.get(LED.class, "front_led_green");
        frontLED_red = hardwareMap.get(LED.class, "front_led_red");
    }

    /** Mirror gamepad buttons onto the LEDs: A lights red, B lights green. */
    @Override
    public void loop() {
        applyState(frontLED_red, gamepad1.a);
        applyState(frontLED_green, gamepad1.b);
    }

    /** Turn the given LED channel on when {@code illuminate} is true, off otherwise. */
    private void applyState(LED led, boolean illuminate) {
        if (illuminate) {
            led.on();
        } else {
            led.off();
        }
    }
}

View File

@ -38,8 +38,9 @@ import com.qualcomm.robotcore.util.Range;
/*
* This OpMode executes a basic Tank Drive Teleop for a two wheeled robot using two REV SPARKminis.
* To use this example, connect two REV SPARKminis into servo ports on the Expansion Hub. On the
* This OpMode demonstrates a POV Drive system, with commented-out code for a Tank Drive system,
* for a two wheeled robot using two REV SPARKminis.
* To use this example, connect two REV SPARKminis into servo ports on the Control Hub. On the
* robot configuration, use the drop down list under 'Servos' to select 'REV SPARKmini Controller'
* and name them 'left_drive' and 'right_drive'.
*
@ -62,8 +63,7 @@ public class ConceptRevSPARKMini extends LinearOpMode {
telemetry.update();
// Initialize the hardware variables. Note that the strings used here as parameters
// to 'get' must correspond to the names assigned during the robot configuration
// step (using the FTC Robot Controller app on the phone).
// to 'get' must correspond to the names assigned during robot configuration.
leftDrive = hardwareMap.get(DcMotorSimple.class, "left_drive");
rightDrive = hardwareMap.get(DcMotorSimple.class, "right_drive");
@ -72,7 +72,7 @@ public class ConceptRevSPARKMini extends LinearOpMode {
leftDrive.setDirection(DcMotorSimple.Direction.FORWARD);
rightDrive.setDirection(DcMotorSimple.Direction.REVERSE);
// Wait for the game to start (driver presses PLAY)
// Wait for the game to start (driver presses START)
waitForStart();
runtime.reset();

View File

@ -100,7 +100,7 @@ public class ConceptSoundsASJava extends LinearOpMode {
telemetry.addData("gold resource", goldFound ? "Found" : "NOT found\n Add gold.wav to /src/main/res/raw" );
telemetry.addData("silver resource", silverFound ? "Found" : "Not found\n Add silver.wav to /src/main/res/raw" );
// Wait for the game to start (driver presses PLAY)
// Wait for the game to start (driver presses START)
telemetry.addData(">", "Press Start to continue");
telemetry.update();
waitForStart();

View File

@ -0,0 +1,191 @@
/*
* Copyright (c) 2024 Phil Malone
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package org.firstinspires.ftc.robotcontroller.external.samples;
import android.util.Size;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.util.SortOrder;
import org.firstinspires.ftc.robotcore.external.Telemetry;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.vision.VisionPortal;
import org.firstinspires.ftc.vision.opencv.ColorBlobLocatorProcessor;
import org.firstinspires.ftc.vision.opencv.ColorRange;
import org.firstinspires.ftc.vision.opencv.ImageRegion;
import org.opencv.core.RotatedRect;
import java.util.List;
/*
* This OpMode illustrates how to use a video source (camera) to locate specifically colored regions
*
* Unlike a "color sensor" which determines the color of an object in the field of view, this "color locator"
* will search the Region Of Interest (ROI) in a camera image, and find any "blobs" of color that match the requested color range.
* These blobs can be further filtered and sorted to find the one most likely to be the item the user is looking for.
*
* To perform this function, a VisionPortal runs a ColorBlobLocatorProcessor process.
* The ColorBlobLocatorProcessor process is created first, and then the VisionPortal is built to use this process.
* The ColorBlobLocatorProcessor analyses the ROI and locates pixels that match the ColorRange to form a "mask".
* The matching pixels are then collected into contiguous "blobs" of pixels. The outer boundaries of these blobs are called its "contour".
* For each blob, the process then creates the smallest possible rectangle "boxFit" that will fully encase the contour.
* The user can then call getBlobs() to retrieve the list of Blobs, where each Blob contains the contour and the boxFit data.
* Note: The default sort order for Blobs is ContourArea, in descending order, so the biggest contours are listed first.
*
* To aid the user, a colored boxFit rectangle is drawn on the camera preview to show the location of each Blob
* The original Blob contour can also be added to the preview. This is helpful when configuring the ColorBlobLocatorProcessor parameters.
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list
*/
@Disabled
@TeleOp(name = "Concept: Vision Color-Locator", group = "Concept")
public class ConceptVisionColorLocator extends LinearOpMode
{
// Builds a blue color-blob locator and a VisionPortal, then continuously reports
// the filtered blob list (area/density/aspect/center) to the Driver Station.
@Override
public void runOpMode()
{
/* Build a "Color Locator" vision processor based on the ColorBlobLocatorProcessor class.
* - Specify the color range you are looking for. You can use a predefined color, or create your own color range
* .setTargetColorRange(ColorRange.BLUE) // use a predefined color match
* Available predefined colors are: RED, BLUE, YELLOW, GREEN
* .setTargetColorRange(new ColorRange(ColorSpace.YCrCb, // or define your own color match
* new Scalar( 32, 176, 0),
* new Scalar(255, 255, 132)))
*
* - Focus the color locator by defining a RegionOfInterest (ROI) which you want to search.
* This can be the entire frame, or a sub-region defined using:
* 1) standard image coordinates or 2) a normalized +/- 1.0 coordinate system.
* Use one form of the ImageRegion class to define the ROI.
* ImageRegion.entireFrame()
* ImageRegion.asImageCoordinates(50, 50, 150, 150) 100x100 pixel square near the upper left corner
* ImageRegion.asUnityCenterCoordinates(-0.5, 0.5, 0.5, -0.5) 50% width/height square centered on screen
*
* - Define which contours are included.
* You can get ALL the contours, or you can skip any contours that are completely inside another contour.
* .setContourMode(ColorBlobLocatorProcessor.ContourMode.ALL_FLATTENED_HIERARCHY) // return all contours
* .setContourMode(ColorBlobLocatorProcessor.ContourMode.EXTERNAL_ONLY) // exclude contours inside other contours
* note: EXTERNAL_ONLY helps to avoid bright reflection spots from breaking up areas of solid color.
*
* - turn the display of contours ON or OFF. Turning this on helps debugging but takes up valuable CPU time.
* .setDrawContours(true)
*
* - include any pre-processing of the image or mask before looking for Blobs.
* There are some extra processing you can include to improve the formation of blobs. Using these features requires
* an understanding of how they may affect the final blobs. The "pixels" argument sets the NxN kernel size.
* .setBlurSize(int pixels) Blurring an image helps to provide a smooth color transition between objects, and smoother contours.
* The higher the number of pixels, the more blurred the image becomes.
* Note: Even "pixels" values will be incremented to satisfy the "odd number" requirement.
* Blurring too much may hide smaller features. A "pixels" size of 5 is good for a 320x240 image.
* .setErodeSize(int pixels) Erosion removes floating pixels and thin lines so that only substantive objects remain.
* Erosion can grow holes inside regions, and also shrink objects.
* "pixels" in the range of 2-4 are suitable for low res images.
* .setDilateSize(int pixels) Dilation makes objects more visible by filling in small holes, making lines appear thicker,
* and making filled shapes appear larger. Dilation is useful for joining broken parts of an
* object, such as when removing noise from an image.
* "pixels" in the range of 2-4 are suitable for low res images.
*/
ColorBlobLocatorProcessor colorLocator = new ColorBlobLocatorProcessor.Builder()
.setTargetColorRange(ColorRange.BLUE) // use a predefined color match
.setContourMode(ColorBlobLocatorProcessor.ContourMode.EXTERNAL_ONLY) // exclude blobs inside blobs
.setRoi(ImageRegion.asUnityCenterCoordinates(-0.5, 0.5, 0.5, -0.5)) // search central 1/4 of camera view
.setDrawContours(true) // Show contours on the Stream Preview
.setBlurSize(5) // Smooth the transitions between different colors in image
.build();
/*
* Build a vision portal to run the Color Locator process.
*
* - Add the colorLocator process created above.
* - Set the desired video resolution.
* Since a high resolution will not improve this process, choose a lower resolution that is
* supported by your camera. This will improve overall performance and reduce latency.
* - Choose your video source. This may be
* .setCamera(hardwareMap.get(WebcamName.class, "Webcam 1")) ..... for a webcam
* or
* .setCamera(BuiltinCameraDirection.BACK) ... for a Phone Camera
*/
// NOTE(review): 'portal' is never referenced again; building it presumably starts the
// camera stream that feeds colorLocator — confirm against the VisionPortal docs.
VisionPortal portal = new VisionPortal.Builder()
.addProcessor(colorLocator)
.setCameraResolution(new Size(320, 240))
.setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"))
.build();
telemetry.setMsTransmissionInterval(50); // Speed up telemetry updates, Just use for debugging.
telemetry.setDisplayFormat(Telemetry.DisplayFormat.MONOSPACE);
// WARNING: To be able to view the stream preview on the Driver Station, this code runs in INIT mode.
// There is deliberately no waitForStart(): the loop condition below admits INIT as well
// as the active period, so blob telemetry is visible before START is pressed.
while (opModeIsActive() || opModeInInit())
{
telemetry.addData("preview on/off", "... Camera Stream\n");
// Read the current list
List<ColorBlobLocatorProcessor.Blob> blobs = colorLocator.getBlobs();
/*
* The list of Blobs can be filtered to remove unwanted Blobs.
* Note: All contours will be still displayed on the Stream Preview, but only those that satisfy the filter
* conditions will remain in the current list of "blobs". Multiple filters may be used.
*
* Use any of the following filters.
*
* ColorBlobLocatorProcessor.Util.filterByArea(minArea, maxArea, blobs);
* A Blob's area is the number of pixels contained within the Contour. Filter out any that are too big or small.
* Start with a large range and then refine the range based on the likely size of the desired object in the viewfinder.
*
* ColorBlobLocatorProcessor.Util.filterByDensity(minDensity, maxDensity, blobs);
* A blob's density is an indication of how "full" the contour is.
* If you put a rubber band around the contour you would get the "Convex Hull" of the contour.
* The density is the ratio of Contour-area to Convex Hull-area.
*
* ColorBlobLocatorProcessor.Util.filterByAspectRatio(minAspect, maxAspect, blobs);
* A blob's Aspect ratio is the ratio of boxFit long side to short side.
* A perfect Square has an aspect ratio of 1. All others are > 1
*/
ColorBlobLocatorProcessor.Util.filterByArea(50, 20000, blobs); // filter out very small blobs.
/*
* The list of Blobs can be sorted using the same Blob attributes as listed above.
* No more than one sort call should be made. Sorting can use ascending or descending order.
* ColorBlobLocatorProcessor.Util.sortByArea(SortOrder.DESCENDING, blobs); // Default
* ColorBlobLocatorProcessor.Util.sortByDensity(SortOrder.DESCENDING, blobs);
* ColorBlobLocatorProcessor.Util.sortByAspectRatio(SortOrder.DESCENDING, blobs);
*/
telemetry.addLine(" Area Density Aspect Center");
// Display the size (area) and center location for each Blob.
for(ColorBlobLocatorProcessor.Blob b : blobs)
{
RotatedRect boxFit = b.getBoxFit();
telemetry.addLine(String.format("%5d %4.2f %5.2f (%3d,%3d)",
b.getContourArea(), b.getDensity(), b.getAspectRatio(), (int) boxFit.center.x, (int) boxFit.center.y));
}
telemetry.update();
sleep(50);
}
}
}

View File

@ -0,0 +1,136 @@
/*
* Copyright (c) 2024 Phil Malone
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package org.firstinspires.ftc.robotcontroller.external.samples;
import android.graphics.Color;
import android.util.Size;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.vision.VisionPortal;
import org.firstinspires.ftc.vision.opencv.ImageRegion;
import org.firstinspires.ftc.vision.opencv.PredominantColorProcessor;
/*
* This OpMode illustrates how to use a video source (camera) as a color sensor
*
* A "color sensor" will typically determine the color of the object that it is pointed at.
*
* This sample performs the same function, except it uses a video camera to inspect an object or scene.
* The user may choose to inspect all, or just a Region of Interest (ROI), of the active camera view.
* The user must also provide a list of "acceptable colors" (Swatches) from which the closest matching color will be selected.
*
* To perform this function, a VisionPortal runs a PredominantColorProcessor process.
* The PredominantColorProcessor process is created first, and then the VisionPortal is built to use this process.
* The PredominantColorProcessor analyses the ROI and splits the colored pixels into several color-clusters.
* The largest of these clusters is then considered to be the "Predominant Color"
* The process then matches the Predominant Color with the closest Swatch and returns that match.
*
* To aid the user, a colored rectangle is drawn on the camera preview to show the RegionOfInterest,
* The Predominant Color is used to paint the rectangle border, so the user can verify that the color is reasonable.
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list
*/
@Disabled
@TeleOp(name = "Concept: Vision Color-Sensor", group = "Concept")
public class ConceptVisionColorSensor extends LinearOpMode
{
    @Override
    public void runOpMode()
    {
        /*
         * Build the "color sensor" vision processor.
         *
         * The Region of Interest (ROI) here is a small square centered on the camera
         * view, defined in normalized +/-1.0 coordinates. Other ROI options include:
         *   ImageRegion.entireFrame()
         *   ImageRegion.asImageCoordinates(50, 50, 150, 150)
         *
         * Only the swatches listed below can ever be reported, so any other color in
         * the ROI is forced to its closest listed match (e.g. GREEN may come back as
         * YELLOW). Available swatches:
         *   RED, ORANGE, YELLOW, GREEN, CYAN, BLUE, PURPLE, MAGENTA, BLACK, WHITE
         */
        PredominantColorProcessor colorAnalyzer = new PredominantColorProcessor.Builder()
                .setRoi(ImageRegion.asUnityCenterCoordinates(-0.1, 0.1, 0.1, -0.1))
                .setSwatches(
                        PredominantColorProcessor.Swatch.RED,
                        PredominantColorProcessor.Swatch.BLUE,
                        PredominantColorProcessor.Swatch.YELLOW,
                        PredominantColorProcessor.Swatch.BLACK,
                        PredominantColorProcessor.Swatch.WHITE)
                .build();

        /*
         * Attach the processor to a VisionPortal.
         *
         * A low resolution is chosen deliberately: higher resolutions do not improve
         * this analysis, and a smaller frame reduces latency. To use the phone camera
         * instead of a webcam, replace the setCamera() call with:
         *   .setCamera(BuiltinCameraDirection.BACK)
         */
        VisionPortal cameraPortal = new VisionPortal.Builder()
                .addProcessor(colorAnalyzer)
                .setCameraResolution(new Size(320, 240))
                .setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"))
                .build();

        // Faster telemetry refresh; useful while debugging only.
        telemetry.setMsTransmissionInterval(50);

        // Loop during INIT as well as PLAY so the Driver Station camera-stream
        // preview can be checked before the OpMode is started.
        while (opModeInInit() || opModeIsActive())
        {
            telemetry.addData("DS preview on/off", "3 dots, Camera Stream\n");

            // Fetch the most recent analysis: the closest matching swatch plus the
            // predominant RGB color. To act on the result, compare the swatch, e.g.:
            //   if (result.closestSwatch == PredominantColorProcessor.Swatch.RED) { ... }
            PredominantColorProcessor.Result result = colorAnalyzer.getAnalysis();

            // Show the matched swatch and its raw RGB components.
            telemetry.addData("Best Match:", result.closestSwatch);
            telemetry.addLine(String.format("R %3d, G %3d, B %3d", Color.red(result.rgb), Color.green(result.rgb), Color.blue(result.rgb)));
            telemetry.update();

            sleep(20);
        }
    }
}

View File

@ -110,7 +110,7 @@ public class RobotAutoDriveByEncoder_Linear extends LinearOpMode {
rightDrive.getCurrentPosition());
telemetry.update();
// Wait for the game to start (driver presses PLAY)
// Wait for the game to start (driver presses START)
waitForStart();
// Step through each leg of the path,

View File

@ -65,7 +65,7 @@ import org.firstinspires.ftc.robotcore.external.navigation.YawPitchRollAngles;
* Notes:
*
* All angles are referenced to the coordinate-frame that is set whenever resetHeading() is called.
* In this sample, the heading is reset when the Start button is touched on the Driver station.
* In this sample, the heading is reset when the Start button is touched on the Driver Station.
* Note: It would be possible to reset the heading after each move, but this would accumulate steering errors.
*
* The angle of movement/rotation is assumed to be a standardized rotation around the robot Z axis,
@ -124,15 +124,15 @@ public class RobotAutoDriveByGyro_Linear extends LinearOpMode {
// These constants define the desired driving/control characteristics
// They can/should be tweaked to suit the specific robot drive train.
static final double DRIVE_SPEED = 0.4; // Max driving speed for better distance accuracy.
static final double TURN_SPEED = 0.2; // Max Turn speed to limit turn rate
static final double TURN_SPEED = 0.2; // Max turn speed to limit turn rate.
static final double HEADING_THRESHOLD = 1.0 ; // How close must the heading get to the target before moving to next step.
// Requiring more accuracy (a smaller number) will often make the turn take longer to get into the final position.
// Define the Proportional control coefficient (or GAIN) for "heading control".
// We define one value when Turning (larger errors), and the other is used when Driving straight (smaller errors).
// Increase these numbers if the heading does not corrects strongly enough (eg: a heavy robot or using tracks)
// Increase these numbers if the heading does not correct strongly enough (eg: a heavy robot or using tracks)
// Decrease these numbers if the heading does not settle on the correct value (eg: very agile robot with omni wheels)
static final double P_TURN_GAIN = 0.02; // Larger is more responsive, but also less stable
static final double P_DRIVE_GAIN = 0.03; // Larger is more responsive, but also less stable
static final double P_TURN_GAIN = 0.02; // Larger is more responsive, but also less stable.
static final double P_DRIVE_GAIN = 0.03; // Larger is more responsive, but also less stable.
@Override
@ -317,7 +317,7 @@ public class RobotAutoDriveByGyro_Linear extends LinearOpMode {
* <p>
* Move will stop once the requested time has elapsed
* <p>
* This function is useful for giving the robot a moment to stabilize it's heading between movements.
* This function is useful for giving the robot a moment to stabilize its heading between movements.
*
* @param maxTurnSpeed Maximum differential turn speed (range 0 to +1.0)
* @param heading Absolute Heading Angle (in Degrees) relative to last gyro reset.

View File

@ -85,10 +85,10 @@ public class RobotAutoDriveByTime_Linear extends LinearOpMode {
telemetry.addData("Status", "Ready to run"); //
telemetry.update();
// Wait for the game to start (driver presses PLAY)
// Wait for the game to start (driver presses START)
waitForStart();
// Step through each leg of the path, ensuring that the Auto mode has not been stopped along the way
// Step through each leg of the path, ensuring that the OpMode has not been stopped along the way.
// Step 1: Drive forward for 3 seconds
leftDrive.setPower(FORWARD_SPEED);

View File

@ -64,7 +64,7 @@ import java.util.concurrent.TimeUnit;
* The code assumes a Robot Configuration with motors named: leftfront_drive and rightfront_drive, leftback_drive and rightback_drive.
* The motor directions must be set so a positive power goes forward on all wheels.
* This sample assumes that the current game AprilTag Library (usually for the current season) is being loaded by default,
* so you should choose to approach a valid tag ID (usually starting at 0)
* so you should choose to approach a valid tag ID.
*
* Under manual control, the left stick will move forward/back & left/right. The right stick will rotate the robot.
* Manually drive the robot until it displays Target data on the Driver Station.
@ -95,12 +95,12 @@ public class RobotAutoDriveToAprilTagOmni extends LinearOpMode
// Set the GAIN constants to control the relationship between the measured position error, and how much power is
// applied to the drive motors to correct the error.
// Drive = Error * Gain Make these values smaller for smoother control, or larger for a more aggressive response.
final double SPEED_GAIN = 0.02 ; // Forward Speed Control "Gain". eg: Ramp up to 50% power at a 25 inch error. (0.50 / 25.0)
final double STRAFE_GAIN = 0.015 ; // Strafe Speed Control "Gain". eg: Ramp up to 25% power at a 25 degree Yaw error. (0.25 / 25.0)
final double TURN_GAIN = 0.01 ; // Turn Control "Gain". eg: Ramp up to 25% power at a 25 degree error. (0.25 / 25.0)
final double SPEED_GAIN = 0.02 ; // Forward Speed Control "Gain". e.g. Ramp up to 50% power at a 25 inch error. (0.50 / 25.0)
final double STRAFE_GAIN = 0.015 ; // Strafe Speed Control "Gain". e.g. Ramp up to 37% power at a 25 degree Yaw error. (0.375 / 25.0)
final double TURN_GAIN = 0.01 ; // Turn Control "Gain". e.g. Ramp up to 25% power at a 25 degree error. (0.25 / 25.0)
final double MAX_AUTO_SPEED = 0.5; // Clip the approach speed to this max value (adjust for your robot)
final double MAX_AUTO_STRAFE= 0.5; // Clip the approach speed to this max value (adjust for your robot)
final double MAX_AUTO_STRAFE= 0.5; // Clip the strafing speed to this max value (adjust for your robot)
final double MAX_AUTO_TURN = 0.3; // Clip the turn speed to this max value (adjust for your robot)
private DcMotor leftFrontDrive = null; // Used to control the left front drive wheel
@ -145,7 +145,7 @@ public class RobotAutoDriveToAprilTagOmni extends LinearOpMode
// Wait for driver to press start
telemetry.addData("Camera preview on/off", "3 dots, Camera Stream");
telemetry.addData(">", "Touch Play to start OpMode");
telemetry.addData(">", "Touch START to start OpMode");
telemetry.update();
waitForStart();
@ -259,7 +259,7 @@ public class RobotAutoDriveToAprilTagOmni extends LinearOpMode
aprilTag = new AprilTagProcessor.Builder().build();
// Adjust Image Decimation to trade-off detection-range for detection-rate.
// eg: Some typical detection data using a Logitech C920 WebCam
// e.g. Some typical detection data using a Logitech C920 WebCam
// Decimation = 1 .. Detect 2" Tag from 10 feet away at 10 Frames per second
// Decimation = 2 .. Detect 2" Tag from 6 feet away at 22 Frames per second
// Decimation = 3 .. Detect 2" Tag from 4 feet away at 30 Frames Per Second

View File

@ -63,7 +63,7 @@ import java.util.concurrent.TimeUnit;
* The code assumes a Robot Configuration with motors named left_drive and right_drive.
* The motor directions must be set so a positive power goes forward on both wheels;
* This sample assumes that the default AprilTag Library (usually for the current season) is being loaded by default
* so you should choose to approach a valid tag ID (usually starting at 0)
* so you should choose to approach a valid tag ID.
*
* Under manual control, the left stick will move forward/back, and the right stick will rotate the robot.
* This is called POV Joystick mode, different than Tank Drive (where each joystick controls a wheel).
@ -94,8 +94,8 @@ public class RobotAutoDriveToAprilTagTank extends LinearOpMode
// Set the GAIN constants to control the relationship between the measured position error, and how much power is
// applied to the drive motors to correct the error.
// Drive = Error * Gain Make these values smaller for smoother control, or larger for a more aggressive response.
final double SPEED_GAIN = 0.02 ; // Speed Control "Gain". eg: Ramp up to 50% power at a 25 inch error. (0.50 / 25.0)
final double TURN_GAIN = 0.01 ; // Turn Control "Gain". eg: Ramp up to 25% power at a 25 degree error. (0.25 / 25.0)
final double SPEED_GAIN = 0.02 ; // Speed Control "Gain". e.g. Ramp up to 50% power at a 25 inch error. (0.50 / 25.0)
final double TURN_GAIN = 0.01 ; // Turn Control "Gain". e.g. Ramp up to 25% power at a 25 degree error. (0.25 / 25.0)
final double MAX_AUTO_SPEED = 0.5; // Clip the approach speed to this max value (adjust for your robot)
final double MAX_AUTO_TURN = 0.25; // Clip the turn speed to this max value (adjust for your robot)
@ -135,7 +135,7 @@ public class RobotAutoDriveToAprilTagTank extends LinearOpMode
// Wait for the driver to press Start
telemetry.addData("Camera preview on/off", "3 dots, Camera Stream");
telemetry.addData(">", "Touch Play to start OpMode");
telemetry.addData(">", "Touch START to start OpMode");
telemetry.update();
waitForStart();
@ -234,7 +234,7 @@ public class RobotAutoDriveToAprilTagTank extends LinearOpMode
aprilTag = new AprilTagProcessor.Builder().build();
// Adjust Image Decimation to trade-off detection-range for detection-rate.
// eg: Some typical detection data using a Logitech C920 WebCam
// e.g. Some typical detection data using a Logitech C920 WebCam
// Decimation = 1 .. Detect 2" Tag from 10 feet away at 10 Frames per second
// Decimation = 2 .. Detect 2" Tag from 6 feet away at 22 Frames per second
// Decimation = 3 .. Detect 2" Tag from 4 feet away at 30 Frames Per Second

View File

@ -106,7 +106,7 @@ public class RobotAutoDriveToLine_Linear extends LinearOpMode {
// A gain of 15 causes a Rev Color Sensor V2 to produce an Alpha value of 1.0 at about 1.5" above the floor.
colorSensor.setGain(15);
// Wait for driver to press PLAY)
// Wait for driver to press START)
// Abort this loop if the OpMode is started or stopped.
while (opModeInInit()) {

View File

@ -96,10 +96,10 @@ public class RobotTeleopPOV_Linear extends LinearOpMode {
rightClaw.setPosition(MID_SERVO);
// Send telemetry message to signify robot waiting;
telemetry.addData(">", "Robot Ready. Press Play."); //
telemetry.addData(">", "Robot Ready. Press START."); //
telemetry.update();
// Wait for the game to start (driver presses PLAY)
// Wait for the game to start (driver presses START)
waitForStart();
// run until the end of the match (driver presses STOP)

View File

@ -94,25 +94,25 @@ public class RobotTeleopTank_Iterative extends OpMode{
rightClaw.setPosition(MID_SERVO);
// Send telemetry message to signify robot waiting;
telemetry.addData(">", "Robot Ready. Press Play."); //
telemetry.addData(">", "Robot Ready. Press START."); //
}
/*
* Code to run REPEATEDLY after the driver hits INIT, but before they hit PLAY
* Code to run REPEATEDLY after the driver hits INIT, but before they hit START
*/
@Override
public void init_loop() {
}
/*
* Code to run ONCE when the driver hits PLAY
* Code to run ONCE when the driver hits START
*/
@Override
public void start() {
}
/*
* Code to run REPEATEDLY after the driver hits PLAY but before they hit STOP
* Code to run REPEATEDLY after the driver hits START but before they hit STOP
*/
@Override
public void loop() {

View File

@ -33,13 +33,10 @@ THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
package org.firstinspires.ftc.robotcontroller.external.samples;
import com.qualcomm.hardware.dfrobot.HuskyLens;
import com.qualcomm.hardware.rev.Rev2mDistanceSensor;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.DistanceSensor;
import org.firstinspires.ftc.robotcore.external.navigation.DistanceUnit;
import org.firstinspires.ftc.robotcore.internal.system.Deadline;
import java.util.concurrent.TimeUnit;
@ -51,6 +48,9 @@ import java.util.concurrent.TimeUnit;
* detect a number of predefined objects and AprilTags in the 36h11 family, can
* recognize colors, and can be trained to detect custom objects. See this website for
* documentation: https://wiki.dfrobot.com/HUSKYLENS_V1.0_SKU_SEN0305_SEN0336
*
* For detailed instructions on how a HuskyLens is used in FTC, please see this tutorial:
* https://ftc-docs.firstinspires.org/en/latest/devices/huskylens/huskylens.html
*
* This sample illustrates how to detect AprilTags, but can be used to detect other types
* of objects by changing the algorithm. It assumes that the HuskyLens is configured with
@ -110,6 +110,8 @@ public class SensorHuskyLens extends LinearOpMode {
* Users, should, in general, explicitly choose the algorithm they want to use
* within the OpMode by calling selectAlgorithm() and passing it one of the values
* found in the enumeration HuskyLens.Algorithm.
*
* Other algorithm choices for FTC might be: OBJECT_RECOGNITION, COLOR_RECOGNITION or OBJECT_CLASSIFICATION.
*/
huskyLens.selectAlgorithm(HuskyLens.Algorithm.TAG_RECOGNITION);
@ -141,6 +143,15 @@ public class SensorHuskyLens extends LinearOpMode {
telemetry.addData("Block count", blocks.length);
for (int i = 0; i < blocks.length; i++) {
telemetry.addData("Block", blocks[i].toString());
/*
* Here inside the FOR loop, you could save or evaluate specific info for the currently recognized Bounding Box:
* - blocks[i].width and blocks[i].height (size of box, in pixels)
* - blocks[i].left and blocks[i].top (edges of box)
* - blocks[i].x and blocks[i].y (center location)
* - blocks[i].id (Color ID)
*
* These values have Java type int (integer).
*/
}
telemetry.update();

View File

@ -58,7 +58,7 @@ import static com.qualcomm.hardware.rev.RevHubOrientationOnRobot.xyzOrientation;
* planes (X/Y, X/Z or Y/Z) OR that the Hub has only been rotated in a range of 90 degree increments.
*
* Note: if your Hub is mounted Orthogonally (on a orthogonal surface, angled at some multiple of
* 90 Degrees) then you should use the simpler SensorImuOrthogonal sample in this folder.
* 90 Degrees) then you should use the simpler SensorIMUOrthogonal sample in this folder.
*
* But... If your Hub is mounted Non-Orthogonally, you must specify one or more rotational angles
* that transform a "Default" Hub orientation into your desired orientation. That is what is
@ -94,6 +94,9 @@ public class SensorIMUNonOrthogonal extends LinearOpMode
* 1) Hub laying flat on a horizontal surface, with the Printed Logo facing UP
* 2) Rotated such that the USB ports are facing forward on the robot.
*
* If you are using a REV External IMU, the "Default" orientation is the same as for a REV Hub, but instead of
* the USB ports facing forward, the I2C port faces forward.
*
* The order that the rotations are performed matters, so this sample shows doing them in the order X, Y, then Z.
* For specifying non-orthogonal hub mounting orientations, we must temporarily use axes
* defined relative to the Hub itself, instead of the usual Robot Coordinate System axes
@ -124,7 +127,7 @@ public class SensorIMUNonOrthogonal extends LinearOpMode
*
* To get the "Default" hub into this configuration you would just need a single rotation, but around a different axis.
* 1) No rotation around the X or Y axes.
* 1) Rotate the Hub -30 degrees (Clockwise) around the Z axis, since a positive angle would be Counter Clockwise.
* 2) Rotate the Hub -30 degrees (Clockwise) around the Z axis, since a positive angle would be Counter Clockwise.
*
* So the X,Y,Z rotations would be 0,0,-30
*

View File

@ -55,7 +55,7 @@ import org.firstinspires.ftc.robotcore.external.navigation.YawPitchRollAngles;
* (X/Y, X/Z or Y/Z) and that the Hub has only been rotated in a range of 90 degree increments.
*
* Note: if your Hub is mounted on a surface angled at some non-90 Degree multiple (like 30) look at
* the alternative SensorImuNonOrthogonal sample in this folder.
* the alternative SensorIMUNonOrthogonal sample in this folder.
*
* This "Orthogonal" requirement means that:
*
@ -98,6 +98,9 @@ public class SensorIMUOrthogonal extends LinearOpMode
* The first parameter specifies the direction the printed logo on the Hub is pointing.
* The second parameter specifies the direction the USB connector on the Hub is pointing.
* All directions are relative to the robot, and left/right is as-viewed from behind the robot.
*
* If you are using a REV 9-Axis IMU, you can use the Rev9AxisImuOrientationOnRobot class instead of the
* RevHubOrientationOnRobot class, which has an I2cPortFacingDirection instead of a UsbFacingDirection.
*/
/* The next two lines define Hub orientation.

View File

@ -0,0 +1,159 @@
/*
Copyright (c) 2024 Limelight Vision
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted (subject to the limitations in the disclaimer below) provided that
the following conditions are met:
Redistributions of source code must retain the above copyright notice, this list
of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.
Neither the name of FIRST nor the names of its contributors may be used to
endorse or promote products derived from this software without specific prior
written permission.
NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.robotcontroller.external.samples;
import com.qualcomm.hardware.limelightvision.LLResult;
import com.qualcomm.hardware.limelightvision.LLResultTypes;
import com.qualcomm.hardware.limelightvision.LLStatus;
import com.qualcomm.hardware.limelightvision.Limelight3A;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import org.firstinspires.ftc.robotcore.external.navigation.Pose3D;
import java.util.List;
/*
* This OpMode illustrates how to use the Limelight3A Vision Sensor.
*
* @see <a href="https://limelightvision.io/">Limelight</a>
*
* Notes on configuration:
*
* The device presents itself, when plugged into a USB port on a Control Hub as an ethernet
* interface. A DHCP server running on the Limelight automatically assigns the Control Hub an
* ip address for the new ethernet interface.
*
* Since the Limelight is plugged into a USB port, it will be listed on the top level configuration
* activity along with the Control Hub Portal and other USB devices such as webcams. Typically
* serial numbers are displayed below the device's names. In the case of the Limelight device, the
* Control Hub's assigned ip address for that ethernet interface is used as the "serial number".
*
* Tapping the Limelight's name, transitions to a new screen where the user can rename the Limelight
* and specify the Limelight's ip address. Users should take care not to confuse the ip address of
* the Limelight itself, which can be configured through the Limelight settings page via a web browser,
* and the ip address the Limelight device assigned the Control Hub and which is displayed in small text
* below the name of the Limelight on the top level configuration screen.
*/
@TeleOp(name = "Sensor: Limelight3A", group = "Sensor")
@Disabled
public class SensorLimelight3A extends LinearOpMode {
private Limelight3A limelight;
@Override
public void runOpMode() throws InterruptedException
{
limelight = hardwareMap.get(Limelight3A.class, "limelight");
telemetry.setMsTransmissionInterval(11);
limelight.pipelineSwitch(0);
/*
* Starts polling for data. If you neglect to call start(), getLatestResult() will return null.
*/
limelight.start();
telemetry.addData(">", "Robot Ready. Press Play.");
telemetry.update();
waitForStart();
while (opModeIsActive()) {
LLStatus status = limelight.getStatus();
telemetry.addData("Name", "%s",
status.getName());
telemetry.addData("LL", "Temp: %.1fC, CPU: %.1f%%, FPS: %d",
status.getTemp(), status.getCpu(),(int)status.getFps());
telemetry.addData("Pipeline", "Index: %d, Type: %s",
status.getPipelineIndex(), status.getPipelineType());
LLResult result = limelight.getLatestResult();
if (result != null) {
// Access general information
Pose3D botpose = result.getBotpose();
double captureLatency = result.getCaptureLatency();
double targetingLatency = result.getTargetingLatency();
double parseLatency = result.getParseLatency();
telemetry.addData("LL Latency", captureLatency + targetingLatency);
telemetry.addData("Parse Latency", parseLatency);
telemetry.addData("PythonOutput", java.util.Arrays.toString(result.getPythonOutput()));
if (result.isValid()) {
telemetry.addData("tx", result.getTx());
telemetry.addData("txnc", result.getTxNC());
telemetry.addData("ty", result.getTy());
telemetry.addData("tync", result.getTyNC());
telemetry.addData("Botpose", botpose.toString());
// Access barcode results
List<LLResultTypes.BarcodeResult> barcodeResults = result.getBarcodeResults();
for (LLResultTypes.BarcodeResult br : barcodeResults) {
telemetry.addData("Barcode", "Data: %s", br.getData());
}
// Access classifier results
List<LLResultTypes.ClassifierResult> classifierResults = result.getClassifierResults();
for (LLResultTypes.ClassifierResult cr : classifierResults) {
telemetry.addData("Classifier", "Class: %s, Confidence: %.2f", cr.getClassName(), cr.getConfidence());
}
// Access detector results
List<LLResultTypes.DetectorResult> detectorResults = result.getDetectorResults();
for (LLResultTypes.DetectorResult dr : detectorResults) {
telemetry.addData("Detector", "Class: %s, Area: %.2f", dr.getClassName(), dr.getTargetArea());
}
// Access fiducial results
List<LLResultTypes.FiducialResult> fiducialResults = result.getFiducialResults();
for (LLResultTypes.FiducialResult fr : fiducialResults) {
telemetry.addData("Fiducial", "ID: %d, Family: %s, X: %.2f, Y: %.2f", fr.getFiducialId(), fr.getFamily(),fr.getTargetXDegrees(), fr.getTargetYDegrees());
}
// Access color results
List<LLResultTypes.ColorResult> colorResults = result.getColorResults();
for (LLResultTypes.ColorResult cr : colorResults) {
telemetry.addData("Color", "X: %.2f, Y: %.2f", cr.getTargetXDegrees(), cr.getTargetYDegrees());
}
}
} else {
telemetry.addData("Limelight", "No data available");
}
telemetry.update();
}
limelight.stop();
}
}

View File

@ -52,7 +52,7 @@ import org.firstinspires.ftc.robotcore.external.Telemetry;
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list
*
* See the sensor's product page: https://www.tindie.com/products/digitalchickenlabs/octoquad-8ch-quadrature-pulse-width-decoder/
* See the sensor's product page: https://www.tindie.com/products/35114/
*/
@TeleOp(name = "OctoQuad Basic", group="OctoQuad")
@Disabled
@ -60,7 +60,7 @@ public class SensorOctoQuad extends LinearOpMode {
// Identify which encoder OctoQuad inputs are connected to each odometry pod.
private final int ODO_LEFT = 0; // Facing forward direction on left side of robot (Axial motion)
private final int ODO_RIGHT = 1; // Facing forward direction on right side or robot (Axial motion )
private final int ODO_RIGHT = 1; // Facing forward direction on right side of robot (Axial motion)
private final int ODO_PERP = 2; // Facing perpendicular direction at the center of the robot (Lateral motion)
// Declare the OctoQuad object and members to store encoder positions and velocities
@ -83,7 +83,7 @@ public class SensorOctoQuad extends LinearOpMode {
telemetry.addData("OctoQuad Firmware Version ", octoquad.getFirmwareVersion());
// Reverse the count-direction of any encoder that is not what you require.
// Eg: if you push the robot forward and the left encoder counts down, then reverse it so it counts up.
// e.g. if you push the robot forward and the left encoder counts down, then reverse it so it counts up.
octoquad.setSingleEncoderDirection(ODO_LEFT, OctoQuad.EncoderDirection.REVERSE);
octoquad.setSingleEncoderDirection(ODO_RIGHT, OctoQuad.EncoderDirection.FORWARD);
octoquad.setSingleEncoderDirection(ODO_PERP, OctoQuad.EncoderDirection.FORWARD);
@ -91,7 +91,7 @@ public class SensorOctoQuad extends LinearOpMode {
// Any changes that are made should be saved in FLASH just in case there is a sensor power glitch.
octoquad.saveParametersToFlash();
telemetry.addLine("\nPress Play to read encoder values");
telemetry.addLine("\nPress START to read encoder values");
telemetry.update();
waitForStart();
@ -100,23 +100,23 @@ public class SensorOctoQuad extends LinearOpMode {
telemetry.setDisplayFormat(Telemetry.DisplayFormat.MONOSPACE);
telemetry.setMsTransmissionInterval(50);
// Set all the encoder inputs to zero
// Set all the encoder inputs to zero.
octoquad.resetAllPositions();
// Loop while displaying the odometry pod positions.
while (opModeIsActive()) {
telemetry.addData(">", "Press X to Reset Encoders\n");
// Check for X button to reset encoders
// Check for X button to reset encoders.
if (gamepad1.x) {
// Reset the position of all encoders to zero.
octoquad.resetAllPositions();
}
// Read all the encoder data. Load into local members
// Read all the encoder data. Load into local members.
readOdometryPods();
// Display the values
// Display the values.
telemetry.addData("Left ", "%8d counts", posLeft);
telemetry.addData("Right", "%8d counts", posRight);
telemetry.addData("Perp ", "%8d counts", posPerp);
@ -131,11 +131,11 @@ public class SensorOctoQuad extends LinearOpMode {
// or
// readAllPositions() to get all 8 encoder readings
//
// Since both calls take almost the same amount of time, and the actual channels may not end up being sequential,
// we will read all of the encoder positions, and then pick out the ones we need.
// Since both calls take almost the same amount of time, and the actual channels may not end up
// being sequential, we will read all of the encoder positions, and then pick out the ones we need.
int[] positions = octoquad.readAllPositions();
posLeft = positions[ODO_LEFT];
posRight = positions[ODO_RIGHT];
posPerp = positions[ODO_PERP];
}
}
}

View File

@ -77,7 +77,7 @@ import java.util.List;
* Note: If you prefer, you can move the two support classes from this file, and place them in their own files.
* But leaving them in place is simpler for this example.
*
* See the sensor's product page: https://www.tindie.com/products/digitalchickenlabs/octoquad-8ch-quadrature-pulse-width-decoder/
* See the sensor's product page: https://www.tindie.com/products/35114/
*/
@TeleOp(name="OctoQuad Advanced", group="OctoQuad")
@Disabled
@ -93,7 +93,7 @@ public class SensorOctoQuadAdv extends LinearOpMode {
// Display the OctoQuad firmware revision
telemetry.addLine("OctoQuad Firmware v" + octoquad.getFirmwareVersion());
telemetry.addLine("\nPress Play to read encoder values");
telemetry.addLine("\nPress START to read encoder values");
telemetry.update();
waitForStart();
@ -275,4 +275,4 @@ class OctoSwerveModule {
public void show(Telemetry telemetry) {
telemetry.addData(name, "%8.0f %7.0f %7.0f %6.0f", driveCounts, driveCountsPerSec, steerDegrees, steerDegreesPerSec);
}
}
}

View File

@ -446,7 +446,7 @@ public class UtilityOctoQuadConfigMenu extends LinearOpMode
StringBuilder builder = new StringBuilder();
builder.append("<font color='#119af5' face=monospace>");
builder.append("Navigate items.....dpad up/down\n")
.append("Select.............X\n")
.append("Select.............X or Square\n")
.append("Edit option........dpad left/right\n")
.append("Up one level.......left bumper\n");
builder.append("</font>");
@ -614,7 +614,7 @@ public class UtilityOctoQuadConfigMenu extends LinearOpMode
@Override
public void onClick()
{
onRightInput();
//onRightInput();
}
@Override
@ -669,7 +669,7 @@ public class UtilityOctoQuadConfigMenu extends LinearOpMode
@Override
public void onClick()
{
onRightInput();
//onRightInput();
}
@Override

View File

@ -1,4 +1,11 @@
# Road Runner Quickstart
# Welcome to the Official Pedro Pathing Quickstart!
Check out the [docs](https://rr.brott.dev/docs/v1-0/tuning/).
To begin, clone this repo and then open it in Android Studio.
Then, follow the steps on our [website](https://pedropathing.com/)!
For Path Generation, you can use this [path generator](https://discord.gg/2GfC4qBP5s).
---
### Feel free to reach out on the [Official Pedro Pathing Discord Server](https://discord.gg/2GfC4qBP5s)!

View File

@ -23,18 +23,10 @@ android {
}
}
repositories {
maven {
url = 'https://maven.brott.dev/'
}
}
dependencies {
implementation project(':FtcRobotController')
annotationProcessor files('lib/OpModeAnnotationProcessor.jar')
implementation "com.acmerobotics.roadrunner:ftc:0.1.13"
implementation "com.acmerobotics.roadrunner:core:1.0.0"
implementation "com.acmerobotics.roadrunner:actions:1.0.0"
implementation "com.acmerobotics.dashboard:dashboard:0.4.14"
implementation 'com.fasterxml.jackson.core:jackson-databind:2.12.7'
implementation 'org.jetbrains.kotlin:kotlin-stdlib:1.4.21'
}

View File

@ -0,0 +1,115 @@
package org.firstinspires.ftc.teamcode;
/*
import com.acmerobotics.dashboard.FtcDashboard;
import com.acmerobotics.dashboard.telemetry.MultipleTelemetry;
import com.acmerobotics.roadrunner.SleepAction;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import com.qualcomm.robotcore.util.ElapsedTime;
import org.firstinspires.ftc.robotcore.external.Telemetry;
import org.firstinspires.ftc.teamcode.cometbots.CometBotTeleopCompetition;
import org.firstinspires.ftc.teamcode.configs.RobotConstants;
import org.firstinspires.ftc.teamcode.pedroPathing.follower.Follower;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierCurve;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierLine;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathBuilder;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathChain;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Point;
import org.firstinspires.ftc.teamcode.subsystem.AutoPark;
import org.firstinspires.ftc.teamcode.subsystem.HighBasketAutoPath1;
import org.firstinspires.ftc.teamcode.subsystem.HighBasketAutoPath2;
import org.firstinspires.ftc.teamcode.subsystem.LiftActionsSubsystem;
import org.firstinspires.ftc.teamcode.subsystem.SkyHookSubsystem;
@Autonomous(name = "Auto Test Competition", group = "Dev")
public class BlueBasketAuto extends OpMode {
private Follower follower;
private int state;
private HighBasketAutoPath1 path1;
private HighBasketAutoPath2 path2;
private AutoPark pathPark;
private SkyHookSubsystem hook;
private CometBotTeleopCompetition comp;
private ElapsedTime runtime;
private LiftActionsSubsystem liftActionsSubsystem;
@Override
public void init() {
follower = new Follower(hardwareMap);
follower.setMaxPower(.75);
path1 = new HighBasketAutoPath1();
path2 = new HighBasketAutoPath2();
pathPark = new AutoPark();
comp = new CometBotTeleopCompetition(hardwareMap, telemetry, gamepad1, gamepad2);
comp.initCloseClaw();
runtime = new ElapsedTime();
hook = new SkyHookSubsystem(hardwareMap);
state = 0;
}
@Override
public void loop() {
switch(state) {
case 0:
telemetry.addData("case0", "case0");
path1.moveToPath1(follower);
state = 1;
runtime.reset();
case 1:
if (runtime.seconds() > 5) {
telemetry.addData("case1", "case1");
new SleepAction(.5);
comp.highBucketDropAuto();
state = 2;
}
case 2:
if (runtime.seconds() > 15) {
telemetry.addData("case2", "case2");
// new SleepAsction(.5);
//path2.moveToPath1(follower);
//For next time, add encoder control to skyhook and extend here
//comp.moveSkyHook();
//pathPark.moveToPark(follower);
state = 3;
}
case 3:
if (runtime.seconds() > 15) {
telemetry.addData("case3", "case3");
hook.toLevel1Position();
state = 4;
}
case 4:
if (runtime.seconds() > 15) {
telemetry.addData("case3", "case3");
hook.toLevel1Position();
state = 4;
}
//System.out.println("default");
//telemetry.addData("default", "default");
//telemetry.update();
}
telemetry.update();
follower.update();
//follower.telemetryDebug(telemetry);
}
}
*/

View File

@ -0,0 +1,33 @@
package org.firstinspires.ftc.teamcode;
import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import org.firstinspires.ftc.teamcode.cometbots.CometBotTeleOpDevelopment;
import org.firstinspires.ftc.teamcode.subsystems.ArmSubsystem;
import org.firstinspires.ftc.teamcode.subsystems.ClawSubsystem;
import org.firstinspires.ftc.teamcode.subsystems.DualMotorSliderSubsystem;
import org.firstinspires.ftc.teamcode.subsystems.HangMotorSubsystem;
import org.firstinspires.ftc.teamcode.subsystems.WristSubsystem;
@TeleOp(name = "CometBot Drive V2.137", group = "Competition")
public class CometBotTeleOpCompetition extends OpMode {

    // NOTE(review): constructed in init() but never referenced afterwards in this
    // class — confirm whether runMode is expected to own the slides instead.
    public DualMotorSliderSubsystem slide;

    /** Delegate holding the actual teleop drive/subsystem logic. */
    public CometBotTeleOpDevelopment runMode;

    /**
     * Creates the development run mode and the slide subsystem, then delegates
     * initialization to the run mode.
     */
    @Override
    public void init() {
        runMode = new CometBotTeleOpDevelopment(hardwareMap, gamepad1, gamepad2);
        slide = new DualMotorSliderSubsystem(hardwareMap);
        runMode.init();
    }

    /** Runs one teleop iteration via the delegate, then flushes telemetry. */
    @Override
    public void loop() {
        runMode.update();
        telemetry.update();
    }

    /** No-op: no explicit shutdown is required for the subsystems used here. */
    @Override
    public void stop() {
    }
}

View File

@ -1,22 +0,0 @@
package org.firstinspires.ftc.teamcode;
import com.acmerobotics.dashboard.canvas.Canvas;
import com.acmerobotics.roadrunner.Pose2d;
import com.acmerobotics.roadrunner.Vector2d;
/** Dashboard field-overlay helpers for visualizing the robot pose. */
public final class Drawing {
    /** Utility class; not instantiable. */
    private Drawing() {}

    /**
     * Draws the robot at pose {@code t} on canvas {@code c}: a circle of
     * radius 9 around the position, plus a heading tick running from the
     * half-radius point out to the rim.
     */
    public static void drawRobot(Canvas c, Pose2d t) {
        final double ROBOT_RADIUS = 9;

        c.setStrokeWidth(1);
        c.strokeCircle(t.position.x, t.position.y, ROBOT_RADIUS);

        // Heading indicator: segment of length R/2 starting at R/2 from center.
        Vector2d headingHalf = t.heading.vec().times(0.5 * ROBOT_RADIUS);
        Vector2d tickStart = t.position.plus(headingHalf);
        Vector2d tickEnd = tickStart.plus(headingHalf);
        c.strokeLine(tickStart.x, tickStart.y, tickEnd.x, tickEnd.y);
    }
}

View File

@ -1,8 +0,0 @@
package org.firstinspires.ftc.teamcode;
import com.acmerobotics.roadrunner.Time;
import com.acmerobotics.roadrunner.Twist2dDual;
public interface Localizer {
    /**
     * Reads the sensors and returns the robot's incremental motion as a twist
     * (translation and rotation with first-order time derivatives).
     *
     * <p>The implementation in this file ({@code MecanumDrive.DriveLocalizer})
     * returns the motion accumulated since the previous {@code update()} call,
     * and a zero twist on the very first call.
     */
    Twist2dDual<Time> update();
}

View File

@ -1,489 +0,0 @@
package org.firstinspires.ftc.teamcode;
import androidx.annotation.NonNull;
import com.acmerobotics.dashboard.canvas.Canvas;
import com.acmerobotics.dashboard.config.Config;
import com.acmerobotics.dashboard.telemetry.TelemetryPacket;
import com.acmerobotics.roadrunner.*;
import com.acmerobotics.roadrunner.AngularVelConstraint;
import com.acmerobotics.roadrunner.DualNum;
import com.acmerobotics.roadrunner.HolonomicController;
import com.acmerobotics.roadrunner.MecanumKinematics;
import com.acmerobotics.roadrunner.MinVelConstraint;
import com.acmerobotics.roadrunner.MotorFeedforward;
import com.acmerobotics.roadrunner.Pose2d;
import com.acmerobotics.roadrunner.Pose2dDual;
import com.acmerobotics.roadrunner.ProfileAccelConstraint;
import com.acmerobotics.roadrunner.Time;
import com.acmerobotics.roadrunner.TimeTrajectory;
import com.acmerobotics.roadrunner.TimeTurn;
import com.acmerobotics.roadrunner.TrajectoryActionBuilder;
import com.acmerobotics.roadrunner.TurnConstraints;
import com.acmerobotics.roadrunner.Twist2dDual;
import com.acmerobotics.roadrunner.VelConstraint;
import com.acmerobotics.roadrunner.ftc.DownsampledWriter;
import com.acmerobotics.roadrunner.ftc.Encoder;
import com.acmerobotics.roadrunner.ftc.FlightRecorder;
import com.acmerobotics.roadrunner.ftc.LazyImu;
import com.acmerobotics.roadrunner.ftc.LynxFirmware;
import com.acmerobotics.roadrunner.ftc.OverflowEncoder;
import com.acmerobotics.roadrunner.ftc.PositionVelocityPair;
import com.acmerobotics.roadrunner.ftc.RawEncoder;
import com.qualcomm.hardware.lynx.LynxModule;
import com.qualcomm.hardware.rev.RevHubOrientationOnRobot;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.hardware.DcMotorEx;
import com.qualcomm.robotcore.hardware.DcMotorSimple;
import com.qualcomm.robotcore.hardware.HardwareMap;
import com.qualcomm.robotcore.hardware.IMU;
import com.qualcomm.robotcore.hardware.VoltageSensor;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.robotcore.external.navigation.YawPitchRollAngles;
import org.firstinspires.ftc.teamcode.messages.DriveCommandMessage;
import org.firstinspires.ftc.teamcode.messages.MecanumCommandMessage;
import org.firstinspires.ftc.teamcode.messages.MecanumLocalizerInputsMessage;
import org.firstinspires.ftc.teamcode.messages.PoseMessage;
import java.lang.Math;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
@Config
public final class MecanumDrive {
    /**
     * Tunable drive parameters exposed to FTC Dashboard via {@code @Config}.
     * All values here are at their quickstart defaults (i.e. not yet tuned).
     */
    public static class Params {
        // IMU orientation
        // TODO: fill in these values based on
        // see https://ftc-docs.firstinspires.org/en/latest/programming_resources/imu/imu.html?highlight=imu#physical-hub-mounting
        public RevHubOrientationOnRobot.LogoFacingDirection logoFacingDirection =
                RevHubOrientationOnRobot.LogoFacingDirection.UP;
        public RevHubOrientationOnRobot.UsbFacingDirection usbFacingDirection =
                RevHubOrientationOnRobot.UsbFacingDirection.FORWARD;

        // drive model parameters
        public double inPerTick = 1;
        public double lateralInPerTick = inPerTick;
        public double trackWidthTicks = 0;

        // feedforward parameters (in tick units)
        public double kS = 0;
        public double kV = 0;
        public double kA = 0;

        // path profile parameters (in inches)
        public double maxWheelVel = 50;
        public double minProfileAccel = -30;
        public double maxProfileAccel = 50;

        // turn profile parameters (in radians)
        public double maxAngVel = Math.PI; // shared with path
        public double maxAngAccel = Math.PI;

        // path controller gains
        public double axialGain = 0.0;
        public double lateralGain = 0.0;
        public double headingGain = 0.0; // shared with turn

        public double axialVelGain = 0.0;
        public double lateralVelGain = 0.0;
        public double headingVelGain = 0.0; // shared with turn
    }

    public static Params PARAMS = new Params();

    // Kinematic model derived from the (currently untuned) tick-unit parameters.
    public final MecanumKinematics kinematics = new MecanumKinematics(
            PARAMS.inPerTick * PARAMS.trackWidthTicks, PARAMS.inPerTick / PARAMS.lateralInPerTick);

    // Default constraints used by actionBuilder() when a trajectory specifies none.
    public final TurnConstraints defaultTurnConstraints = new TurnConstraints(
            PARAMS.maxAngVel, -PARAMS.maxAngAccel, PARAMS.maxAngAccel);
    public final VelConstraint defaultVelConstraint =
            new MinVelConstraint(Arrays.asList(
                    kinematics.new WheelVelConstraint(PARAMS.maxWheelVel),
                    new AngularVelConstraint(PARAMS.maxAngVel)
            ));
    public final AccelConstraint defaultAccelConstraint =
            new ProfileAccelConstraint(PARAMS.minProfileAccel, PARAMS.maxProfileAccel);

    public final DcMotorEx leftFront, leftBack, rightBack, rightFront;

    public final VoltageSensor voltageSensor;

    // IMU is created lazily so the (slow) initialization happens off the critical path.
    public final LazyImu lazyImu;

    public final Localizer localizer;
    // Current pose estimate; advanced by updatePoseEstimate().
    public Pose2d pose;

    // Rolling window of recent poses for the dashboard overlay (capped at 100).
    private final LinkedList<Pose2d> poseHistory = new LinkedList<>();

    // Downsampled flight-recorder channels (one write per 50 ms of wall time).
    private final DownsampledWriter estimatedPoseWriter = new DownsampledWriter("ESTIMATED_POSE", 50_000_000);
    private final DownsampledWriter targetPoseWriter = new DownsampledWriter("TARGET_POSE", 50_000_000);
    private final DownsampledWriter driveCommandWriter = new DownsampledWriter("DRIVE_COMMAND", 50_000_000);
    private final DownsampledWriter mecanumCommandWriter = new DownsampledWriter("MECANUM_COMMAND", 50_000_000);

    /**
     * Localizer that fuses the four drive-wheel encoders with the IMU yaw:
     * wheel deltas give the translational twist, the IMU gives the heading delta.
     */
    public class DriveLocalizer implements Localizer {
        public final Encoder leftFront, leftBack, rightBack, rightFront;
        public final IMU imu;

        // Encoder positions and heading from the previous update() call.
        private int lastLeftFrontPos, lastLeftBackPos, lastRightBackPos, lastRightFrontPos;
        private Rotation2d lastHeading;
        // False until the first update() seeds the "last" values above.
        private boolean initialized;

        public DriveLocalizer() {
            // OverflowEncoder guards against 16-bit velocity overflow on REV hubs.
            leftFront = new OverflowEncoder(new RawEncoder(MecanumDrive.this.leftFront));
            leftBack = new OverflowEncoder(new RawEncoder(MecanumDrive.this.leftBack));
            rightBack = new OverflowEncoder(new RawEncoder(MecanumDrive.this.rightBack));
            rightFront = new OverflowEncoder(new RawEncoder(MecanumDrive.this.rightFront));

            imu = lazyImu.get();

            // TODO: reverse encoders if needed
            // leftFront.setDirection(DcMotorSimple.Direction.REVERSE);
        }

        /**
         * Returns the twist since the previous call (zero twist on the first
         * call, which only seeds the baseline readings).
         */
        @Override
        public Twist2dDual<Time> update() {
            PositionVelocityPair leftFrontPosVel = leftFront.getPositionAndVelocity();
            PositionVelocityPair leftBackPosVel = leftBack.getPositionAndVelocity();
            PositionVelocityPair rightBackPosVel = rightBack.getPositionAndVelocity();
            PositionVelocityPair rightFrontPosVel = rightFront.getPositionAndVelocity();

            YawPitchRollAngles angles = imu.getRobotYawPitchRollAngles();

            FlightRecorder.write("MECANUM_LOCALIZER_INPUTS", new MecanumLocalizerInputsMessage(
                    leftFrontPosVel, leftBackPosVel, rightBackPosVel, rightFrontPosVel, angles));

            Rotation2d heading = Rotation2d.exp(angles.getYaw(AngleUnit.RADIANS));

            if (!initialized) {
                // First call: record baselines and report no motion.
                initialized = true;

                lastLeftFrontPos = leftFrontPosVel.position;
                lastLeftBackPos = leftBackPosVel.position;
                lastRightBackPos = rightBackPosVel.position;
                lastRightFrontPos = rightFrontPosVel.position;

                lastHeading = heading;

                return new Twist2dDual<>(
                        Vector2dDual.constant(new Vector2d(0.0, 0.0), 2),
                        DualNum.constant(0.0, 2)
                );
            }

            double headingDelta = heading.minus(lastHeading);
            // Forward kinematics: wheel tick deltas (scaled to inches) -> chassis twist.
            Twist2dDual<Time> twist = kinematics.forward(new MecanumKinematics.WheelIncrements<>(
                    new DualNum<Time>(new double[]{
                            (leftFrontPosVel.position - lastLeftFrontPos),
                            leftFrontPosVel.velocity,
                    }).times(PARAMS.inPerTick),
                    new DualNum<Time>(new double[]{
                            (leftBackPosVel.position - lastLeftBackPos),
                            leftBackPosVel.velocity,
                    }).times(PARAMS.inPerTick),
                    new DualNum<Time>(new double[]{
                            (rightBackPosVel.position - lastRightBackPos),
                            rightBackPosVel.velocity,
                    }).times(PARAMS.inPerTick),
                    new DualNum<Time>(new double[]{
                            (rightFrontPosVel.position - lastRightFrontPos),
                            rightFrontPosVel.velocity,
                    }).times(PARAMS.inPerTick)
            ));

            lastLeftFrontPos = leftFrontPosVel.position;
            lastLeftBackPos = leftBackPosVel.position;
            lastRightBackPos = rightBackPosVel.position;
            lastRightFrontPos = rightFrontPosVel.position;

            lastHeading = heading;

            // Replace the kinematic heading estimate with the IMU's (more accurate).
            return new Twist2dDual<>(
                    twist.line,
                    DualNum.cons(headingDelta, twist.angle.drop(1))
            );
        }
    }

    /**
     * Looks up all drive hardware, enables bulk caching, and wires up the
     * localizer starting from {@code pose}.
     */
    public MecanumDrive(HardwareMap hardwareMap, Pose2d pose) {
        this.pose = pose;

        LynxFirmware.throwIfModulesAreOutdated(hardwareMap);

        for (LynxModule module : hardwareMap.getAll(LynxModule.class)) {
            module.setBulkCachingMode(LynxModule.BulkCachingMode.AUTO);
        }

        // TODO: make sure your config has motors with these names (or change them)
        // see https://ftc-docs.firstinspires.org/en/latest/hardware_and_software_configuration/configuring/index.html
        leftFront = hardwareMap.get(DcMotorEx.class, "leftFront");
        leftBack = hardwareMap.get(DcMotorEx.class, "leftBack");
        rightBack = hardwareMap.get(DcMotorEx.class, "rightBack");
        rightFront = hardwareMap.get(DcMotorEx.class, "rightFront");

        leftFront.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);
        leftBack.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);
        rightBack.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);
        rightFront.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);

        // TODO: reverse motor directions if needed
        // leftFront.setDirection(DcMotorSimple.Direction.REVERSE);

        // TODO: make sure your config has an IMU with this name (can be BNO or BHI)
        // see https://ftc-docs.firstinspires.org/en/latest/hardware_and_software_configuration/configuring/index.html
        lazyImu = new LazyImu(hardwareMap, "imu", new RevHubOrientationOnRobot(
                PARAMS.logoFacingDirection, PARAMS.usbFacingDirection));

        voltageSensor = hardwareMap.voltageSensor.iterator().next();

        localizer = new DriveLocalizer();

        FlightRecorder.write("MECANUM_PARAMS", PARAMS);
    }

    /**
     * Open-loop teleop drive: converts a normalized chassis velocity into wheel
     * powers, scaling all powers down if any exceeds 1.
     */
    public void setDrivePowers(PoseVelocity2d powers) {
        MecanumKinematics.WheelVelocities<Time> wheelVels = new MecanumKinematics(1).inverse(
                PoseVelocity2dDual.constant(powers, 1));

        double maxPowerMag = 1;
        for (DualNum<Time> power : wheelVels.all()) {
            // NOTE(review): value() is not absolute-valued, so a large negative
            // wheel power does not trigger normalization (matches the upstream
            // Road Runner quickstart) — confirm this is intended.
            maxPowerMag = Math.max(maxPowerMag, power.value());
        }

        leftFront.setPower(wheelVels.leftFront.get(0) / maxPowerMag);
        leftBack.setPower(wheelVels.leftBack.get(0) / maxPowerMag);
        rightBack.setPower(wheelVels.rightBack.get(0) / maxPowerMag);
        rightFront.setPower(wheelVels.rightFront.get(0) / maxPowerMag);
    }

    /**
     * Action that tracks a time-parameterized trajectory with the holonomic
     * controller plus motor feedforward; stops the motors and returns false
     * once the trajectory's duration has elapsed.
     */
    public final class FollowTrajectoryAction implements Action {
        public final TimeTrajectory timeTrajectory;
        // Timestamp of the first run() call; -1 until started.
        private double beginTs = -1;

        // Precomputed path samples for the dashboard preview/overlay.
        private final double[] xPoints, yPoints;

        public FollowTrajectoryAction(TimeTrajectory t) {
            timeTrajectory = t;

            // Sample the path roughly every 2 inches (at least 2 points).
            List<Double> disps = com.acmerobotics.roadrunner.Math.range(
                    0, t.path.length(),
                    Math.max(2, (int) Math.ceil(t.path.length() / 2)));
            xPoints = new double[disps.size()];
            yPoints = new double[disps.size()];
            for (int i = 0; i < disps.size(); i++) {
                Pose2d p = t.path.get(disps.get(i), 1).value();
                xPoints[i] = p.position.x;
                yPoints[i] = p.position.y;
            }
        }

        @Override
        public boolean run(@NonNull TelemetryPacket p) {
            double t;
            if (beginTs < 0) {
                beginTs = Actions.now();
                t = 0;
            } else {
                t = Actions.now() - beginTs;
            }

            if (t >= timeTrajectory.duration) {
                // Trajectory finished: stop all motors and end the action.
                leftFront.setPower(0);
                leftBack.setPower(0);
                rightBack.setPower(0);
                rightFront.setPower(0);

                return false;
            }

            Pose2dDual<Time> txWorldTarget = timeTrajectory.get(t);
            targetPoseWriter.write(new PoseMessage(txWorldTarget.value()));

            PoseVelocity2d robotVelRobot = updatePoseEstimate();

            // Feedback: pose/velocity error -> commanded chassis velocity.
            PoseVelocity2dDual<Time> command = new HolonomicController(
                    PARAMS.axialGain, PARAMS.lateralGain, PARAMS.headingGain,
                    PARAMS.axialVelGain, PARAMS.lateralVelGain, PARAMS.headingVelGain
            )
                    .compute(txWorldTarget, pose, robotVelRobot);
            driveCommandWriter.write(new DriveCommandMessage(command));

            // Feedforward: wheel velocities -> motor powers, voltage-compensated.
            MecanumKinematics.WheelVelocities<Time> wheelVels = kinematics.inverse(command);
            double voltage = voltageSensor.getVoltage();

            final MotorFeedforward feedforward = new MotorFeedforward(PARAMS.kS,
                    PARAMS.kV / PARAMS.inPerTick, PARAMS.kA / PARAMS.inPerTick);
            double leftFrontPower = feedforward.compute(wheelVels.leftFront) / voltage;
            double leftBackPower = feedforward.compute(wheelVels.leftBack) / voltage;
            double rightBackPower = feedforward.compute(wheelVels.rightBack) / voltage;
            double rightFrontPower = feedforward.compute(wheelVels.rightFront) / voltage;
            mecanumCommandWriter.write(new MecanumCommandMessage(
                    voltage, leftFrontPower, leftBackPower, rightBackPower, rightFrontPower
            ));

            leftFront.setPower(leftFrontPower);
            leftBack.setPower(leftBackPower);
            rightBack.setPower(rightBackPower);
            rightFront.setPower(rightFrontPower);

            p.put("x", pose.position.x);
            p.put("y", pose.position.y);
            p.put("heading (deg)", Math.toDegrees(pose.heading.toDouble()));

            Pose2d error = txWorldTarget.value().minusExp(pose);
            p.put("xError", error.position.x);
            p.put("yError", error.position.y);
            p.put("headingError (deg)", Math.toDegrees(error.heading.toDouble()));

            // only draw when active; only one drive action should be active at a time
            Canvas c = p.fieldOverlay();
            drawPoseHistory(c);

            c.setStroke("#4CAF50");
            Drawing.drawRobot(c, txWorldTarget.value());

            c.setStroke("#3F51B5");
            Drawing.drawRobot(c, pose);

            c.setStroke("#4CAF50FF");
            c.setStrokeWidth(1);
            c.strokePolyline(xPoints, yPoints);

            return true;
        }

        @Override
        public void preview(Canvas c) {
            c.setStroke("#4CAF507A");
            c.setStrokeWidth(1);
            c.strokePolyline(xPoints, yPoints);
        }
    }

    /**
     * Action that executes a profiled in-place turn; stops the motors and
     * returns false once the turn's duration has elapsed.
     */
    public final class TurnAction implements Action {
        private final TimeTurn turn;

        // Timestamp of the first run() call; -1 until started.
        private double beginTs = -1;

        public TurnAction(TimeTurn turn) {
            this.turn = turn;
        }

        @Override
        public boolean run(@NonNull TelemetryPacket p) {
            double t;
            if (beginTs < 0) {
                beginTs = Actions.now();
                t = 0;
            } else {
                t = Actions.now() - beginTs;
            }

            if (t >= turn.duration) {
                leftFront.setPower(0);
                leftBack.setPower(0);
                rightBack.setPower(0);
                rightFront.setPower(0);

                return false;
            }

            Pose2dDual<Time> txWorldTarget = turn.get(t);
            targetPoseWriter.write(new PoseMessage(txWorldTarget.value()));

            PoseVelocity2d robotVelRobot = updatePoseEstimate();

            PoseVelocity2dDual<Time> command = new HolonomicController(
                    PARAMS.axialGain, PARAMS.lateralGain, PARAMS.headingGain,
                    PARAMS.axialVelGain, PARAMS.lateralVelGain, PARAMS.headingVelGain
            )
                    .compute(txWorldTarget, pose, robotVelRobot);
            driveCommandWriter.write(new DriveCommandMessage(command));

            MecanumKinematics.WheelVelocities<Time> wheelVels = kinematics.inverse(command);
            double voltage = voltageSensor.getVoltage();
            final MotorFeedforward feedforward = new MotorFeedforward(PARAMS.kS,
                    PARAMS.kV / PARAMS.inPerTick, PARAMS.kA / PARAMS.inPerTick);
            double leftFrontPower = feedforward.compute(wheelVels.leftFront) / voltage;
            double leftBackPower = feedforward.compute(wheelVels.leftBack) / voltage;
            double rightBackPower = feedforward.compute(wheelVels.rightBack) / voltage;
            double rightFrontPower = feedforward.compute(wheelVels.rightFront) / voltage;
            mecanumCommandWriter.write(new MecanumCommandMessage(
                    voltage, leftFrontPower, leftBackPower, rightBackPower, rightFrontPower
            ));

            // NOTE(review): powers are recomputed here instead of reusing the
            // locals logged above — same values, duplicated work.
            leftFront.setPower(feedforward.compute(wheelVels.leftFront) / voltage);
            leftBack.setPower(feedforward.compute(wheelVels.leftBack) / voltage);
            rightBack.setPower(feedforward.compute(wheelVels.rightBack) / voltage);
            rightFront.setPower(feedforward.compute(wheelVels.rightFront) / voltage);

            Canvas c = p.fieldOverlay();
            drawPoseHistory(c);

            c.setStroke("#4CAF50");
            Drawing.drawRobot(c, txWorldTarget.value());

            c.setStroke("#3F51B5");
            Drawing.drawRobot(c, pose);

            c.setStroke("#7C4DFFFF");
            c.fillCircle(turn.beginPose.position.x, turn.beginPose.position.y, 2);

            return true;
        }

        @Override
        public void preview(Canvas c) {
            c.setStroke("#7C4DFF7A");
            c.fillCircle(turn.beginPose.position.x, turn.beginPose.position.y, 2);
        }
    }

    /**
     * Advances {@link #pose} by the localizer's latest twist, records it, and
     * returns the current robot velocity (robot frame).
     */
    public PoseVelocity2d updatePoseEstimate() {
        Twist2dDual<Time> twist = localizer.update();
        pose = pose.plus(twist.value());

        poseHistory.add(pose);
        while (poseHistory.size() > 100) {
            poseHistory.removeFirst();
        }

        estimatedPoseWriter.write(new PoseMessage(pose));

        return twist.velocity().value();
    }

    /** Draws the recorded pose history as a polyline on the field overlay. */
    private void drawPoseHistory(Canvas c) {
        double[] xPoints = new double[poseHistory.size()];
        double[] yPoints = new double[poseHistory.size()];

        int i = 0;
        for (Pose2d t : poseHistory) {
            xPoints[i] = t.position.x;
            yPoints[i] = t.position.y;

            i++;
        }

        c.setStrokeWidth(1);
        c.setStroke("#3F51B5");
        c.strokePolyline(xPoints, yPoints);
    }

    /**
     * Returns a trajectory builder starting at {@code beginPose} that produces
     * {@link TurnAction}s and {@link FollowTrajectoryAction}s under this
     * drive's default constraints.
     */
    public TrajectoryActionBuilder actionBuilder(Pose2d beginPose) {
        return new TrajectoryActionBuilder(
                TurnAction::new,
                FollowTrajectoryAction::new,
                new TrajectoryBuilderParams(
                        1e-6,
                        new ProfileParams(
                                0.25, 0.1, 1e-2
                        )
                ),
                beginPose, 0.0,
                defaultTurnConstraints,
                defaultVelConstraint, defaultAccelConstraint
        );
    }
}

View File

@ -0,0 +1,86 @@
package org.firstinspires.ftc.teamcode;
import com.qualcomm.hardware.rev.RevHubOrientationOnRobot;
import com.qualcomm.robotcore.hardware.DcMotorSimple.Direction;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Encoder;
/**
 * Single source of truth for hardware-map device names, motor/encoder
 * directions, and Pedro Pathing tuning values for this robot.
 */
public class PedroConstants {
    /*
    Robot parameters
    */

    // Robot motor configurations (names as they appear in the RC hardware map)
    public static final String FRONT_LEFT_MOTOR = "front-left";
    public static final String BACK_LEFT_MOTOR = "back-left";
    public static final String FRONT_RIGHT_MOTOR = "front-right";
    public static final String BACK_RIGHT_MOTOR = "back-right";

    // Robot motor direction (left side reversed so all wheels drive forward together)
    public static final Direction FRONT_LEFT_MOTOR_DIRECTION = Direction.REVERSE;
    public static final Direction BACK_LEFT_MOTOR_DIRECTION = Direction.REVERSE;
    public static final Direction FRONT_RIGHT_MOTOR_DIRECTION = Direction.FORWARD;
    public static final Direction BACK_RIGHT_MOTOR_DIRECTION = Direction.FORWARD;

    /*
    Motor Max Power
    */
    public static final double MAX_POWER = .75;

    // Robot IMU configuration (hardware-map name)
    public static final String IMU = "imu";

    // Robot IMU placement (orientation of the REV hub on the chassis)
    public static final RevHubOrientationOnRobot.LogoFacingDirection IMU_LOGO_FACING_DIRECTION
            = RevHubOrientationOnRobot.LogoFacingDirection.LEFT;
    public static final RevHubOrientationOnRobot.UsbFacingDirection IMU_USB_FACING_DIRECTION
            = RevHubOrientationOnRobot.UsbFacingDirection.UP;

    // Robot encoders
    // NOTE: Encoders are plugged into the same ports as motors hence the weird names
    // (trailing numbers are the hub port each encoder occupies)
    public static final String RIGHT_ENCODER = "front-left"; //2
    public static final String BACK_ENCODER = "front-right"; //1
    public static final String LEFT_ENCODER = "back-right"; //0

    // Robot encoder direction
    public static final double LEFT_ENCODER_DIRECTION = Encoder.FORWARD;
    public static final double RIGHT_ENCODER_DIRECTION = Encoder.REVERSE;
    public static final double BACK_ENCODER_DIRECTION = Encoder.REVERSE;

    // Arm config (hardware-map names for the scoring mechanism)
    public static final String LIFT_SLIDE_LEFT_MOTOR = "lift-slide-left";
    public static final String LIFT_SLIDE_RIGHT_MOTOR = "lift-slide-right";
    public static final String CLAW_NAME = "claw-servo";
    public static final String WRIST_SERVO = "wrist-servo";
    public static final String ARM_SERVO = "arm-servo";
    public static final String THUMB_SERVO = "thumb-servo";
    public static final String HOOK = "skyhook";

    /*
    Pedro's parameters (empirically measured via the Pedro Pathing tuning OpModes)
    */
    // The weight of the robot in Kilograms
    public static final double ROBOT_WEIGHT_IN_KG = 9;

    // Maximum velocity of the robot going forward
    public static final double ROBOT_SPEED_FORWARD = 50.02;

    // Maximum velocity of the robot going right
    public static final double ROBOT_SPEED_LATERAL = 36.07;

    // Rate of deceleration when power is cut-off when the robot is moving forward
    public static final double FORWARD_ZERO_POWER_ACCEL = -91.4557;

    // Rate of deceleration when power is cut-off when the robot is moving to the right
    public static final double LATERAL_ZERO_POWER_ACCEL = -98.514;

    // Determines how fast your robot will decelerate as a factor of how fast your robot will coast to a stop
    public static final double ZERO_POWER_ACCEL_MULT = 4.0;

    /* Centripetal force correction - increase if robot is correcting into the path
    - decrease if robot is correcting away from the path */
    public static final double CENTRIPETAL_SCALING = 0.0004;
}

View File

@ -0,0 +1,54 @@
package org.firstinspires.ftc.teamcode;
import com.acmerobotics.dashboard.FtcDashboard;
import com.acmerobotics.dashboard.telemetry.MultipleTelemetry;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import org.firstinspires.ftc.robotcore.external.Telemetry;
import org.firstinspires.ftc.teamcode.pedroPathing.follower.Follower;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierCurve;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierLine;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathChain;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Point;
@Autonomous(name = "PreLoadedBlue", group = "Competition")
public class PreLoadedBlueBasketAuto extends OpMode {

    /** Telemetry mirrored to both the driver station and FTC Dashboard. */
    private Telemetry dashTelemetry;

    /** Pedro Pathing follower that drives the chassis along the path. */
    private Follower follower;

    /** Single-segment path from the start position toward the basket. */
    private PathChain basketPath;

    /** Starting pose on the field (x = 8, y = 55). */
    private final Pose startPose = new Pose(8, 55);

    /**
     * Builds the path, starts following it at reduced power, and sets up
     * combined telemetry. All work happens here; loop() only updates.
     */
    @Override
    public void init() {
        follower = new Follower(hardwareMap);
        follower.setMaxPower(.20);
        follower.setStartingPose(startPose);

        // Line 1: straight segment from (8, 55) to (24, 21), rotating
        // linearly from 0 to -180 degrees along the way.
        BezierLine segment = new BezierLine(
                new Point(8, 55, Point.CARTESIAN),
                new Point(24, 21, Point.CARTESIAN));
        basketPath = follower.pathBuilder()
                .addPath(segment)
                .setLinearHeadingInterpolation(Math.toRadians(0), Math.toRadians(-180))
                .build();
        follower.followPath(basketPath);

        dashTelemetry = new MultipleTelemetry(this.telemetry, FtcDashboard.getInstance().getTelemetry());
        dashTelemetry.update();
    }

    /** Advances the follower and streams its debug telemetry. */
    @Override
    public void loop() {
        follower.update();
        follower.telemetryDebug(dashTelemetry);
    }
}

View File

@ -0,0 +1,131 @@
## TeamCode Module
Welcome!
This module, TeamCode, is the place where you will write/paste the code for your team's
robot controller App. This module is currently empty (a clean slate) but the
process for adding OpModes is straightforward.
## Creating your own OpModes
The easiest way to create your own OpMode is to copy a Sample OpMode and make it your own.
Sample opmodes exist in the FtcRobotController module.
To locate these samples, find the FtcRobotController module in the "Project/Android" tab.
Expand the following tree elements:
FtcRobotController/java/org.firstinspires.ftc.robotcontroller/external/samples
### Naming of Samples
To gain a better understanding of how the samples are organized, and how to interpret the
naming system, it will help to understand the conventions that were used during their creation.
These conventions are described (in detail) in the sample_conventions.md file in this folder.
To summarize: A range of different samples classes will reside in the java/external/samples.
The class names will follow a naming convention which indicates the purpose of each class.
The prefix of the name will be one of the following:
Basic: This is a minimally functional OpMode used to illustrate the skeleton/structure
of a particular style of OpMode. These are bare bones examples.
Sensor: This is a Sample OpMode that shows how to use a specific sensor.
It is not intended to drive a functioning robot, it is simply showing the minimal code
required to read and display the sensor values.
Robot: This is a Sample OpMode that assumes a simple two-motor (differential) drive base.
It may be used to provide a common baseline driving OpMode, or
to demonstrate how a particular sensor or concept can be used to navigate.
Concept: This is a sample OpMode that illustrates performing a specific function or concept.
These may be complex, but their operation should be explained clearly in the comments,
or the comments should reference an external doc, guide or tutorial.
Each OpMode should try to only demonstrate a single concept so they are easy to
locate based on their name. These OpModes may not produce a drivable robot.
After the prefix, other conventions will apply:
* Sensor class names are constructed as: Sensor - Company - Type
* Robot class names are constructed as: Robot - Mode - Action - OpModetype
* Concept class names are constructed as: Concept - Topic - OpModetype
Once you are familiar with the range of samples available, you can choose one to be the
basis for your own robot. In all cases, the desired sample(s) needs to be copied into
your TeamCode module to be used.
This is done inside Android Studio directly, using the following steps:
1) Locate the desired sample class in the Project/Android tree.
2) Right click on the sample class and select "Copy"
3) Expand the TeamCode/java folder
4) Right click on the org.firstinspires.ftc.teamcode folder and select "Paste"
5) You will be prompted for a class name for the copy.
Choose something meaningful based on the purpose of this class.
Start with a capital letter, and remember that there may be more similar classes later.
Once your copy has been created, you should prepare it for use on your robot.
This is done by adjusting the OpMode's name, and enabling it to be displayed on the
Driver Station's OpMode list.
Each OpMode sample class begins with several lines of code like the ones shown below:
```
@TeleOp(name="Template: Linear OpMode", group="Linear Opmode")
@Disabled
```
The name that will appear on the driver station's "opmode list" is defined by the code:
``name="Template: Linear OpMode"``
You can change what appears between the quotes to better describe your opmode.
The "group=" portion of the code can be used to help organize your list of OpModes.
As shown, the current OpMode will NOT appear on the driver station's OpMode list because of the
``@Disabled`` annotation which has been included.
This line can simply be deleted, or commented out, to make the OpMode visible.
## ADVANCED Multi-Team App management: Cloning the TeamCode Module
In some situations, you have multiple teams in your club and you want them to all share
a common code organization, with each being able to *see* the others code but each having
their own team module with their own code that they maintain themselves.
In this situation, you might wish to clone the TeamCode module, once for each of these teams.
Each of the clones would then appear along side each other in the Android Studio module list,
together with the FtcRobotController module (and the original TeamCode module).
Selective Team phones can then be programmed by selecting the desired Module from the pulldown list
prior to clicking to the green Run arrow.
Warning: This is not for the inexperienced Software developer.
You will need to be comfortable with File manipulations and managing Android Studio Modules.
These changes are performed OUTSIDE of Android Studio, so close Android Studio before you do this.
Also.. Make a full project backup before you start this :)
To clone TeamCode, do the following:
Note: Some names start with "Team" and others start with "team". This is intentional.
1) Using your operating system file management tools, copy the whole "TeamCode"
folder to a sibling folder with a corresponding new name, eg: "Team0417".
2) In the new Team0417 folder, delete the TeamCode.iml file.
3) In the new Team0417 folder, rename the "src/main/java/org/firstinspires/ftc/teamcode" folder
to a matching name with a lowercase 'team' eg: "team0417".
4) In the new Team0417/src/main folder, edit the "AndroidManifest.xml" file, change the line that contains
package="org.firstinspires.ftc.teamcode"
to be
package="org.firstinspires.ftc.team0417"
5) Add: include ':Team0417' to the "/settings.gradle" file.
6) Open up Android Studio and clean out any old files by using the menu option "Build/Clean Project"

View File

@ -0,0 +1,117 @@
package org.firstinspires.ftc.teamcode;
import com.acmerobotics.roadrunner.SleepAction;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import com.qualcomm.robotcore.util.ElapsedTime;
import org.firstinspires.ftc.teamcode.pedroPathing.follower.Follower;
import org.firstinspires.ftc.teamcode.subsystems.ArmSubsystem;
import org.firstinspires.ftc.teamcode.subsystems.ClawSubsystem;
import org.firstinspires.ftc.teamcode.subsystems.DualMotorSliderSubsystem;
import org.firstinspires.ftc.teamcode.subsystems.HangMotorSubsystem;
import org.firstinspires.ftc.teamcode.subsystems.WristSubsystem;
@Autonomous(name = "Specimen Auto", group = "Development")
public class SpecimenAuto extends OpMode {
    /** Pedro Pathing follower; drive paths themselves are still TODO (see loop()). */
    private Follower follower;
    /** Current step of the time-based autonomous state machine. */
    private int state;
    /** Match clock started once in start(); thresholds below are cumulative seconds. */
    private ElapsedTime runtime;
    private DualMotorSliderSubsystem lift;
    private ClawSubsystem claw;
    private WristSubsystem wrist;
    private ArmSubsystem arm;
    private HangMotorSubsystem hang;

    /** Creates and initializes all subsystems; runs once when INIT is pressed. */
    @Override
    public void init() {
        lift = new DualMotorSliderSubsystem(hardwareMap);
        claw = new ClawSubsystem(hardwareMap);
        arm = new ArmSubsystem(hardwareMap);
        wrist = new WristSubsystem(hardwareMap);
        hang = new HangMotorSubsystem(hardwareMap);
        follower = new Follower(hardwareMap);
        runtime = new ElapsedTime();
        state = 0;
        lift.init();
        claw.init();
        wrist.InitAuto();
        arm.initAuto();
        follower.setMaxPower(.45);
    }

    /** Runs once when PLAY is pressed. */
    @Override
    public void start() {
        // Start the clock exactly once at the beginning of the match.
        // BUG FIX: previously runtime.reset() ran on *every* loop() pass while in
        // state 0, so runtime.seconds() could never exceed 0.25 and the state
        // machine stalled in state 0. The cumulative thresholds below (3, 3.75,
        // 4, ...) show the timer was meant to run continuously from start.
        runtime.reset();
    }

    /** Time-based state machine: each case waits for a cumulative timestamp. */
    @Override
    public void loop() {
        if (runtime != null) {
            telemetry.addData("Runtime (seconds)", runtime.seconds());
        }
        telemetry.addData("state", state);
        switch (state) {
            case 0:
                wrist.toSpecimenPrep();
                arm.toSpecimenPrep();
                if (runtime.seconds() > 0.25) { state = 1; }
                break;
            case 1:
                // line 1 (drive path still TODO)
                if (runtime.seconds() > 3) { state = 2; }
                break;
            case 2:
                lift.toSpecimanHangHeight();
                if (runtime.seconds() > 3.75) { state = 3; }
                break;
            case 3:
                wrist.toSpecimenHang();
                if (runtime.seconds() > 4) { state = 4; }
                break;
            case 4:
                lift.toSpecimanReleaseHeight();
                if (runtime.seconds() > 4.5) { state = 5; }
                break;
            case 5:
                claw.switchState();
                if (runtime.seconds() > 4.65) { state = 6; }
                break;
            case 6:
                lift.toFloorPosition();
                if (runtime.seconds() > 4.75) { state = 7; }
                break;
            case 7:
                arm.toParkPosition();
                wrist.toTravelPosition();
                // line 2 (drive path still TODO)
                // TODO(review): a threshold of 0. transitions immediately since
                // runtime is already past 4.75 s here — was a per-state timer
                // intended for states 7 and 8? Confirm before competition.
                if (runtime.seconds() > 0.) { state = 8; }
                break;
            case 8:
                if (runtime.seconds() > 0.25) { state = 9; }
                break;
            case 9:
                // specimen drop (TODO)
                break;
            case 10:
                // path 6 (TODO)
                break;
            case 11:
                // specimen pickup (TODO)
                break;
            case 12:
                // path 7 (TODO)
                break;
        }
        // Removed dead code `new SleepAction(5);` — constructing a SleepAction
        // without running it has no effect and allocated garbage every loop.
    }
}

View File

@ -0,0 +1,170 @@
package org.firstinspires.ftc.teamcode;
import com.acmerobotics.dashboard.FtcDashboard;
import com.acmerobotics.dashboard.telemetry.MultipleTelemetry;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import org.firstinspires.ftc.robotcore.external.Telemetry;
import org.firstinspires.ftc.teamcode.pedroPathing.follower.Follower;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierCurve;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierLine;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathChain;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Point;
@Autonomous(name = "Specimen Line Test", group = "Competition")
public class SpecimenAutoLineTest extends OpMode {
    // Combined driver-station + FTC Dashboard telemetry.
    private Telemetry telemetryA;
    // Pedro Pathing follower that drives the pre-built chain below.
    private Follower follower;
    // The full 14-segment path, built once in init().
    private PathChain path;
    // Field start pose in inches; heading defaults to 0 rad.
    private final Pose startPose = new Pose(8, 55);

    /**
     * Builds the entire specimen-cycle path chain (coordinates from the Pedro
     * path generator) and immediately queues it on the follower. Runs once on INIT.
     */
    @Override
    public void init() {
        follower = new Follower(hardwareMap);
        follower.setMaxPower(.35); // slow test speed; competition tunes this up
        follower.setStartingPose(startPose);
        path = follower.pathBuilder()
                .addPath(
                        // Line 1: drive from the wall to the submersible (first specimen hang)
                        new BezierLine(
                                new Point(8.000, 55.000, Point.CARTESIAN),
                                new Point(39.500, 60.000, Point.CARTESIAN)
                        )
                )
                .setLinearHeadingInterpolation(Math.toRadians(0), Math.toRadians(0))
                .addPath(
                        // Line 2: short back-off from the submersible
                        new BezierLine(
                                new Point(39.500, 60.000, Point.CARTESIAN),
                                new Point(37.000, 60.000, Point.CARTESIAN)
                        )
                )
                .setConstantHeadingInterpolation(Math.toRadians(0))
                .addPath(
                        // Line 3: curve around to behind the first field sample
                        new BezierCurve(
                                new Point(37.000, 60.000, Point.CARTESIAN),
                                new Point(16.000, 12.000, Point.CARTESIAN),
                                new Point(80.000, 54.000, Point.CARTESIAN),
                                new Point(58.000, 23.500, Point.CARTESIAN)
                        )
                )
                .setConstantHeadingInterpolation(Math.toRadians(0))
                .addPath(
                        // Line 4: push the sample toward the observation zone
                        new BezierLine(
                                new Point(58.000, 23.500, Point.CARTESIAN),
                                new Point(14.000, 23.500, Point.CARTESIAN)
                        )
                )
                .setConstantHeadingInterpolation(Math.toRadians(0))
                .addPath(
                        // Line 5: return along the same lane (now facing 180 deg)
                        new BezierLine(
                                new Point(14.000, 23.500, Point.CARTESIAN),
                                new Point(58.000, 23.500, Point.CARTESIAN)
                        )
                )
                .setConstantHeadingInterpolation(Math.toRadians(180))
                .addPath(
                        // Line 6: curve down to the second sample lane
                        new BezierCurve(
                                new Point(58.000, 23.500, Point.CARTESIAN),
                                new Point(64.000, 6.000, Point.CARTESIAN),
                                new Point(14.000, 12.000, Point.CARTESIAN)
                        )
                )
                .setConstantHeadingInterpolation(Math.toRadians(180))
                .addPath(
                        // Line 7: curve back behind the third sample
                        new BezierCurve(
                                new Point(14.000, 12.000, Point.CARTESIAN),
                                new Point(60.000, 14.000, Point.CARTESIAN),
                                new Point(60.000, 7.500, Point.CARTESIAN)
                        )
                )
                .setConstantHeadingInterpolation(Math.toRadians(180))
                .addPath(
                        // Line 8: push the third sample to the observation zone
                        new BezierLine(
                                new Point(60.000, 7.500, Point.CARTESIAN),
                                new Point(14.000, 7.500, Point.CARTESIAN)
                        )
                )
                .setConstantHeadingInterpolation(Math.toRadians(180))
                .addPath(
                        // Line 9: swing out to the specimen pickup position
                        new BezierCurve(
                                new Point(14.000, 7.500, Point.CARTESIAN),
                                new Point(34.000, 14.250, Point.CARTESIAN),
                                new Point(19.000, 24.000, Point.CARTESIAN)
                        )
                )
                .setConstantHeadingInterpolation(Math.toRadians(180))
                .addPath(
                        // Line 10: carry the specimen up to the submersible
                        new BezierCurve(
                                new Point(19.000, 24.000, Point.CARTESIAN),
                                new Point(39.500, 24.000, Point.CARTESIAN),
                                new Point(19.000, 64.000, Point.CARTESIAN),
                                new Point(39.500, 64.000, Point.CARTESIAN)
                        )
                )
                .setConstantHeadingInterpolation(Math.toRadians(180))
                .addPath(
                        // Line 11: short back-off after the hang
                        new BezierLine(
                                new Point(39.500, 64.000, Point.CARTESIAN),
                                new Point(37.000, 64.000, Point.CARTESIAN)
                        )
                )
                .setConstantHeadingInterpolation(Math.toRadians(180))
                .addPath(
                        // Line 12: return to the pickup position for the next specimen
                        new BezierCurve(
                                new Point(37.000, 64.000, Point.CARTESIAN),
                                new Point(19.000, 64.000, Point.CARTESIAN),
                                new Point(37.000, 24.000, Point.CARTESIAN),
                                new Point(19.000, 24.000, Point.CARTESIAN)
                        )
                )
                .setConstantHeadingInterpolation(Math.toRadians(180))
                .addPath(
                        // Line 13: second trip to the submersible (slightly higher y)
                        new BezierCurve(
                                new Point(19.000, 24.000, Point.CARTESIAN),
                                new Point(39.500, 24.000, Point.CARTESIAN),
                                new Point(19.000, 68.000, Point.CARTESIAN),
                                new Point(39.500, 68.000, Point.CARTESIAN)
                        )
                )
                .setConstantHeadingInterpolation(Math.toRadians(180))
                .addPath(
                        // Line 14: final back-off
                        new BezierLine(
                                new Point(39.500, 68.000, Point.CARTESIAN),
                                new Point(37.000, 68.000, Point.CARTESIAN)
                        )
                )
                .setConstantHeadingInterpolation(Math.toRadians(180)).build();
        // NOTE(review): the path starts being followed from init(), i.e. as soon
        // as INIT is pressed rather than on PLAY — confirm this is intentional.
        follower.followPath(path);
        telemetryA = new MultipleTelemetry(this.telemetry, FtcDashboard.getInstance().getTelemetry());
        telemetryA.update();
    }

    /** Advances the follower and streams its debug telemetry every loop. */
    @Override
    public void loop() {
        follower.update();
        follower.telemetryDebug(telemetryA);
    }
}

View File

@ -1,498 +0,0 @@
package org.firstinspires.ftc.teamcode;
import androidx.annotation.NonNull;
import com.acmerobotics.dashboard.canvas.Canvas;
import com.acmerobotics.dashboard.config.Config;
import com.acmerobotics.dashboard.telemetry.TelemetryPacket;
import com.acmerobotics.roadrunner.AccelConstraint;
import com.acmerobotics.roadrunner.Action;
import com.acmerobotics.roadrunner.Actions;
import com.acmerobotics.roadrunner.AngularVelConstraint;
import com.acmerobotics.roadrunner.Arclength;
import com.acmerobotics.roadrunner.DualNum;
import com.acmerobotics.roadrunner.MinVelConstraint;
import com.acmerobotics.roadrunner.MotorFeedforward;
import com.acmerobotics.roadrunner.Pose2d;
import com.acmerobotics.roadrunner.Pose2dDual;
import com.acmerobotics.roadrunner.PoseVelocity2d;
import com.acmerobotics.roadrunner.PoseVelocity2dDual;
import com.acmerobotics.roadrunner.ProfileAccelConstraint;
import com.acmerobotics.roadrunner.ProfileParams;
import com.acmerobotics.roadrunner.RamseteController;
import com.acmerobotics.roadrunner.TankKinematics;
import com.acmerobotics.roadrunner.Time;
import com.acmerobotics.roadrunner.TimeTrajectory;
import com.acmerobotics.roadrunner.TimeTurn;
import com.acmerobotics.roadrunner.TrajectoryActionBuilder;
import com.acmerobotics.roadrunner.TrajectoryBuilderParams;
import com.acmerobotics.roadrunner.TurnConstraints;
import com.acmerobotics.roadrunner.Twist2dDual;
import com.acmerobotics.roadrunner.Vector2d;
import com.acmerobotics.roadrunner.Vector2dDual;
import com.acmerobotics.roadrunner.VelConstraint;
import com.acmerobotics.roadrunner.ftc.DownsampledWriter;
import com.acmerobotics.roadrunner.ftc.Encoder;
import com.acmerobotics.roadrunner.ftc.FlightRecorder;
import com.acmerobotics.roadrunner.ftc.LazyImu;
import com.acmerobotics.roadrunner.ftc.LynxFirmware;
import com.acmerobotics.roadrunner.ftc.OverflowEncoder;
import com.acmerobotics.roadrunner.ftc.PositionVelocityPair;
import com.acmerobotics.roadrunner.ftc.RawEncoder;
import com.qualcomm.hardware.lynx.LynxModule;
import com.qualcomm.hardware.rev.RevHubOrientationOnRobot;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.hardware.DcMotorEx;
import com.qualcomm.robotcore.hardware.DcMotorSimple;
import com.qualcomm.robotcore.hardware.HardwareMap;
import com.qualcomm.robotcore.hardware.VoltageSensor;
import org.firstinspires.ftc.teamcode.messages.DriveCommandMessage;
import org.firstinspires.ftc.teamcode.messages.PoseMessage;
import org.firstinspires.ftc.teamcode.messages.TankCommandMessage;
import org.firstinspires.ftc.teamcode.messages.TankLocalizerInputsMessage;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
@Config
public final class TankDrive {
    /** Tunable drive constants, live-editable via FTC Dashboard (@Config). */
    public static class Params {
        // IMU orientation
        // TODO: fill in these values based on
        //   see https://ftc-docs.firstinspires.org/en/latest/programming_resources/imu/imu.html?highlight=imu#physical-hub-mounting
        public RevHubOrientationOnRobot.LogoFacingDirection logoFacingDirection =
                RevHubOrientationOnRobot.LogoFacingDirection.UP;
        public RevHubOrientationOnRobot.UsbFacingDirection usbFacingDirection =
                RevHubOrientationOnRobot.UsbFacingDirection.FORWARD;

        // drive model parameters
        public double inPerTick = 0;
        public double trackWidthTicks = 0;

        // feedforward parameters (in tick units)
        public double kS = 0;
        public double kV = 0;
        public double kA = 0;

        // path profile parameters (in inches)
        public double maxWheelVel = 50;
        public double minProfileAccel = -30;
        public double maxProfileAccel = 50;

        // turn profile parameters (in radians)
        public double maxAngVel = Math.PI; // shared with path
        public double maxAngAccel = Math.PI;

        // path controller gains
        public double ramseteZeta = 0.7; // in the range (0, 1)
        public double ramseteBBar = 2.0; // positive

        // turn controller gains
        public double turnGain = 0.0;
        public double turnVelGain = 0.0;
    }

    public static Params PARAMS = new Params();

    // Track width converted from ticks to inches for the kinematics model.
    public final TankKinematics kinematics = new TankKinematics(PARAMS.inPerTick * PARAMS.trackWidthTicks);

    public final TurnConstraints defaultTurnConstraints = new TurnConstraints(
            PARAMS.maxAngVel, -PARAMS.maxAngVel, PARAMS.maxAngAccel);
    public final VelConstraint defaultVelConstraint =
            new MinVelConstraint(Arrays.asList(
                    kinematics.new WheelVelConstraint(PARAMS.maxWheelVel),
                    new AngularVelConstraint(PARAMS.maxAngVel)
            ));
    public final AccelConstraint defaultAccelConstraint =
            new ProfileAccelConstraint(PARAMS.minProfileAccel, PARAMS.maxProfileAccel);

    public final List<DcMotorEx> leftMotors, rightMotors;

    public final LazyImu lazyImu;

    public final VoltageSensor voltageSensor;

    public final Localizer localizer;
    // Current pose estimate, advanced by updatePoseEstimate().
    public Pose2d pose;

    // Rolling window of recent poses drawn on the dashboard field overlay.
    private final LinkedList<Pose2d> poseHistory = new LinkedList<>();

    // Downsampled flight-recorder channels (one write per 50 ms).
    private final DownsampledWriter estimatedPoseWriter = new DownsampledWriter("ESTIMATED_POSE", 50_000_000);
    private final DownsampledWriter targetPoseWriter = new DownsampledWriter("TARGET_POSE", 50_000_000);
    private final DownsampledWriter driveCommandWriter = new DownsampledWriter("DRIVE_COMMAND", 50_000_000);
    private final DownsampledWriter tankCommandWriter = new DownsampledWriter("TANK_COMMAND", 50_000_000);

    /** Localizer that averages the drive motors' own encoders on each side. */
    public class DriveLocalizer implements Localizer {
        public final List<Encoder> leftEncs, rightEncs;

        private double lastLeftPos, lastRightPos;
        private boolean initialized;

        public DriveLocalizer() {
            {
                List<Encoder> leftEncs = new ArrayList<>();
                for (DcMotorEx m : leftMotors) {
                    Encoder e = new OverflowEncoder(new RawEncoder(m));
                    leftEncs.add(e);
                }
                this.leftEncs = Collections.unmodifiableList(leftEncs);
            }

            {
                List<Encoder> rightEncs = new ArrayList<>();
                for (DcMotorEx m : rightMotors) {
                    Encoder e = new OverflowEncoder(new RawEncoder(m));
                    rightEncs.add(e);
                }
                this.rightEncs = Collections.unmodifiableList(rightEncs);
            }

            // TODO: reverse encoder directions if needed
            //   leftEncs.get(0).setDirection(DcMotorSimple.Direction.REVERSE);
        }

        /**
         * Returns the pose twist (position delta + velocity) since the last call.
         * The first call only seeds the stored positions and returns a zero twist.
         */
        @Override
        public Twist2dDual<Time> update() {
            List<PositionVelocityPair> leftReadings = new ArrayList<>(), rightReadings = new ArrayList<>();
            double meanLeftPos = 0.0, meanLeftVel = 0.0;
            for (Encoder e : leftEncs) {
                PositionVelocityPair p = e.getPositionAndVelocity();
                meanLeftPos += p.position;
                meanLeftVel += p.velocity;
                leftReadings.add(p);
            }
            meanLeftPos /= leftEncs.size();
            meanLeftVel /= leftEncs.size();

            double meanRightPos = 0.0, meanRightVel = 0.0;
            for (Encoder e : rightEncs) {
                PositionVelocityPair p = e.getPositionAndVelocity();
                meanRightPos += p.position;
                meanRightVel += p.velocity;
                rightReadings.add(p);
            }
            meanRightPos /= rightEncs.size();
            meanRightVel /= rightEncs.size();

            FlightRecorder.write("TANK_LOCALIZER_INPUTS",
                    new TankLocalizerInputsMessage(leftReadings, rightReadings));

            if (!initialized) {
                initialized = true;

                lastLeftPos = meanLeftPos;
                lastRightPos = meanRightPos;

                return new Twist2dDual<>(
                        Vector2dDual.constant(new Vector2d(0.0, 0.0), 2),
                        DualNum.constant(0.0, 2)
                );
            }

            // Position delta + velocity per side, scaled from ticks to inches.
            TankKinematics.WheelIncrements<Time> twist = new TankKinematics.WheelIncrements<>(
                    new DualNum<Time>(new double[] {
                            meanLeftPos - lastLeftPos,
                            meanLeftVel
                    }).times(PARAMS.inPerTick),
                    new DualNum<Time>(new double[] {
                            meanRightPos - lastRightPos,
                            meanRightVel,
                    }).times(PARAMS.inPerTick)
            );

            lastLeftPos = meanLeftPos;
            lastRightPos = meanRightPos;

            return kinematics.forward(twist);
        }
    }

    /** Sets up motors, IMU, voltage sensor, and localizer from the hardware map. */
    public TankDrive(HardwareMap hardwareMap, Pose2d pose) {
        this.pose = pose;

        LynxFirmware.throwIfModulesAreOutdated(hardwareMap);

        // Bulk caching cuts hardware-read latency per loop.
        for (LynxModule module : hardwareMap.getAll(LynxModule.class)) {
            module.setBulkCachingMode(LynxModule.BulkCachingMode.AUTO);
        }

        // TODO: make sure your config has motors with these names (or change them)
        //   add additional motors on each side if you have them
        //   see https://ftc-docs.firstinspires.org/en/latest/hardware_and_software_configuration/configuring/index.html
        leftMotors = Arrays.asList(hardwareMap.get(DcMotorEx.class, "left"));
        rightMotors = Arrays.asList(hardwareMap.get(DcMotorEx.class, "right"));

        for (DcMotorEx m : leftMotors) {
            m.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);
        }
        for (DcMotorEx m : rightMotors) {
            m.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);
        }

        // TODO: reverse motor directions if needed
        //   leftMotors.get(0).setDirection(DcMotorSimple.Direction.REVERSE);

        // TODO: make sure your config has an IMU with this name (can be BNO or BHI)
        //   see https://ftc-docs.firstinspires.org/en/latest/hardware_and_software_configuration/configuring/index.html
        lazyImu = new LazyImu(hardwareMap, "imu", new RevHubOrientationOnRobot(
                PARAMS.logoFacingDirection, PARAMS.usbFacingDirection));

        voltageSensor = hardwareMap.voltageSensor.iterator().next();

        localizer = new TankDrive.DriveLocalizer();

        FlightRecorder.write("TANK_PARAMS", PARAMS);
    }

    /**
     * Open-loop teleop drive: converts a normalized robot velocity into wheel
     * powers, scaling down uniformly if any wheel would exceed power 1.
     */
    public void setDrivePowers(PoseVelocity2d powers) {
        // Track width of 2 gives unit-normalized wheel powers (quickstart convention).
        TankKinematics.WheelVelocities<Time> wheelVels = new TankKinematics(2).inverse(
                PoseVelocity2dDual.constant(powers, 1));

        double maxPowerMag = 1;
        for (DualNum<Time> power : wheelVels.all()) {
            maxPowerMag = Math.max(maxPowerMag, power.value());
        }

        for (DcMotorEx m : leftMotors) {
            m.setPower(wheelVels.left.get(0) / maxPowerMag);
        }
        for (DcMotorEx m : rightMotors) {
            m.setPower(wheelVels.right.get(0) / maxPowerMag);
        }
    }

    /** Action that tracks a time-parameterized trajectory with a Ramsete controller. */
    public final class FollowTrajectoryAction implements Action {
        public final TimeTrajectory timeTrajectory;
        private double beginTs = -1; // timestamp of first run() call; -1 = not started

        // Sampled path points for the dashboard preview polyline.
        private final double[] xPoints, yPoints;

        public FollowTrajectoryAction(TimeTrajectory t) {
            timeTrajectory = t;

            // Sample the path roughly every 2 inches (at least 2 points).
            List<Double> disps = com.acmerobotics.roadrunner.Math.range(
                    0, t.path.length(),
                    Math.max(2, (int) Math.ceil(t.path.length() / 2)));
            xPoints = new double[disps.size()];
            yPoints = new double[disps.size()];
            for (int i = 0; i < disps.size(); i++) {
                Pose2d p = t.path.get(disps.get(i), 1).value();
                xPoints[i] = p.position.x;
                yPoints[i] = p.position.y;
            }
        }

        /** Runs one control iteration; returns false (done) once the trajectory's time elapses. */
        @Override
        public boolean run(@NonNull TelemetryPacket p) {
            double t;
            if (beginTs < 0) {
                beginTs = Actions.now();
                t = 0;
            } else {
                t = Actions.now() - beginTs;
            }

            if (t >= timeTrajectory.duration) {
                for (DcMotorEx m : leftMotors) {
                    m.setPower(0);
                }
                for (DcMotorEx m : rightMotors) {
                    m.setPower(0);
                }

                return false;
            }

            DualNum<Time> x = timeTrajectory.profile.get(t);

            Pose2dDual<Arclength> txWorldTarget = timeTrajectory.path.get(x.value(), 3);
            targetPoseWriter.write(new PoseMessage(txWorldTarget.value()));

            updatePoseEstimate();

            // Ramsete feedback computes the commanded robot velocity from pose error.
            PoseVelocity2dDual<Time> command = new RamseteController(kinematics.trackWidth, PARAMS.ramseteZeta, PARAMS.ramseteBBar)
                    .compute(x, txWorldTarget, pose);
            driveCommandWriter.write(new DriveCommandMessage(command));

            TankKinematics.WheelVelocities<Time> wheelVels = kinematics.inverse(command);
            double voltage = voltageSensor.getVoltage();
            // Feedforward gains are in tick units; divide by inPerTick to apply
            // them to inch-unit velocities, then voltage-compensate.
            final MotorFeedforward feedforward = new MotorFeedforward(PARAMS.kS,
                    PARAMS.kV / PARAMS.inPerTick, PARAMS.kA / PARAMS.inPerTick);
            double leftPower = feedforward.compute(wheelVels.left) / voltage;
            double rightPower = feedforward.compute(wheelVels.right) / voltage;
            tankCommandWriter.write(new TankCommandMessage(voltage, leftPower, rightPower));

            for (DcMotorEx m : leftMotors) {
                m.setPower(leftPower);
            }
            for (DcMotorEx m : rightMotors) {
                m.setPower(rightPower);
            }

            p.put("x", pose.position.x);
            p.put("y", pose.position.y);
            p.put("heading (deg)", Math.toDegrees(pose.heading.toDouble()));

            Pose2d error = txWorldTarget.value().minusExp(pose);
            p.put("xError", error.position.x);
            p.put("yError", error.position.y);
            p.put("headingError (deg)", Math.toDegrees(error.heading.toDouble()));

            // only draw when active; only one drive action should be active at a time
            Canvas c = p.fieldOverlay();
            drawPoseHistory(c);

            c.setStroke("#4CAF50");
            Drawing.drawRobot(c, txWorldTarget.value());

            c.setStroke("#3F51B5");
            Drawing.drawRobot(c, pose);

            c.setStroke("#4CAF50FF");
            c.setStrokeWidth(1);
            c.strokePolyline(xPoints, yPoints);

            return true;
        }

        @Override
        public void preview(Canvas c) {
            c.setStroke("#4CAF507A");
            c.setStrokeWidth(1);
            c.strokePolyline(xPoints, yPoints);
        }
    }

    /** Action that performs an in-place turn using a profiled heading target. */
    public final class TurnAction implements Action {
        private final TimeTurn turn;

        private double beginTs = -1; // timestamp of first run() call; -1 = not started

        public TurnAction(TimeTurn turn) {
            this.turn = turn;
        }

        /** Runs one control iteration; returns false (done) once the turn's time elapses. */
        @Override
        public boolean run(@NonNull TelemetryPacket p) {
            double t;
            if (beginTs < 0) {
                beginTs = Actions.now();
                t = 0;
            } else {
                t = Actions.now() - beginTs;
            }

            if (t >= turn.duration) {
                for (DcMotorEx m : leftMotors) {
                    m.setPower(0);
                }
                for (DcMotorEx m : rightMotors) {
                    m.setPower(0);
                }

                return false;
            }

            Pose2dDual<Time> txWorldTarget = turn.get(t);
            targetPoseWriter.write(new PoseMessage(txWorldTarget.value()));

            PoseVelocity2d robotVelRobot = updatePoseEstimate();

            // Heading-only PD feedback on top of the profiled angular velocity.
            PoseVelocity2dDual<Time> command = new PoseVelocity2dDual<>(
                    Vector2dDual.constant(new Vector2d(0, 0), 3),
                    txWorldTarget.heading.velocity().plus(
                            PARAMS.turnGain * pose.heading.minus(txWorldTarget.heading.value()) +
                            PARAMS.turnVelGain * (robotVelRobot.angVel - txWorldTarget.heading.velocity().value())
                    )
            );
            driveCommandWriter.write(new DriveCommandMessage(command));

            TankKinematics.WheelVelocities<Time> wheelVels = kinematics.inverse(command);
            double voltage = voltageSensor.getVoltage();
            final MotorFeedforward feedforward = new MotorFeedforward(PARAMS.kS,
                    PARAMS.kV / PARAMS.inPerTick, PARAMS.kA / PARAMS.inPerTick);
            double leftPower = feedforward.compute(wheelVels.left) / voltage;
            double rightPower = feedforward.compute(wheelVels.right) / voltage;
            tankCommandWriter.write(new TankCommandMessage(voltage, leftPower, rightPower));

            for (DcMotorEx m : leftMotors) {
                m.setPower(leftPower);
            }
            for (DcMotorEx m : rightMotors) {
                m.setPower(rightPower);
            }

            Canvas c = p.fieldOverlay();
            drawPoseHistory(c);

            c.setStroke("#4CAF50");
            Drawing.drawRobot(c, txWorldTarget.value());

            c.setStroke("#3F51B5");
            Drawing.drawRobot(c, pose);

            c.setStroke("#7C4DFFFF");
            c.fillCircle(turn.beginPose.position.x, turn.beginPose.position.y, 2);

            return true;
        }

        @Override
        public void preview(Canvas c) {
            c.setStroke("#7C4DFF7A");
            c.fillCircle(turn.beginPose.position.x, turn.beginPose.position.y, 2);
        }
    }

    /**
     * Advances the pose estimate from the localizer, records it, and returns the
     * current robot-frame velocity.
     */
    public PoseVelocity2d updatePoseEstimate() {
        Twist2dDual<Time> twist = localizer.update();
        pose = pose.plus(twist.value());

        poseHistory.add(pose);
        while (poseHistory.size() > 100) {
            poseHistory.removeFirst();
        }

        estimatedPoseWriter.write(new PoseMessage(pose));

        return twist.velocity().value();
    }

    /** Draws the recent pose history as a polyline on the dashboard field overlay. */
    private void drawPoseHistory(Canvas c) {
        double[] xPoints = new double[poseHistory.size()];
        double[] yPoints = new double[poseHistory.size()];

        int i = 0;
        for (Pose2d t : poseHistory) {
            xPoints[i] = t.position.x;
            yPoints[i] = t.position.y;

            i++;
        }

        c.setStrokeWidth(1);
        c.setStroke("#3F51B5");
        c.strokePolyline(xPoints, yPoints);
    }

    /** Returns a trajectory builder preconfigured with this drive's constraints. */
    public TrajectoryActionBuilder actionBuilder(Pose2d beginPose) {
        return new TrajectoryActionBuilder(
                TurnAction::new,
                FollowTrajectoryAction::new,
                new TrajectoryBuilderParams(
                        1e-6,
                        new ProfileParams(
                                0.25, 0.1, 1e-2
                        )
                ),
                beginPose, 0.0,
                defaultTurnConstraints,
                defaultVelConstraint, defaultAccelConstraint
        );
    }
}

View File

@ -1,100 +0,0 @@
package org.firstinspires.ftc.teamcode;
import com.acmerobotics.dashboard.config.Config;
import com.acmerobotics.roadrunner.DualNum;
import com.acmerobotics.roadrunner.Time;
import com.acmerobotics.roadrunner.Twist2dDual;
import com.acmerobotics.roadrunner.Vector2d;
import com.acmerobotics.roadrunner.Vector2dDual;
import com.acmerobotics.roadrunner.ftc.Encoder;
import com.acmerobotics.roadrunner.ftc.FlightRecorder;
import com.acmerobotics.roadrunner.ftc.OverflowEncoder;
import com.acmerobotics.roadrunner.ftc.PositionVelocityPair;
import com.acmerobotics.roadrunner.ftc.RawEncoder;
import com.qualcomm.robotcore.hardware.DcMotorEx;
import com.qualcomm.robotcore.hardware.DcMotorSimple;
import com.qualcomm.robotcore.hardware.HardwareMap;
import org.firstinspires.ftc.teamcode.messages.ThreeDeadWheelInputsMessage;
@Config
public final class ThreeDeadWheelLocalizer implements Localizer {
    /** Dead-wheel geometry, tunable live via FTC Dashboard. */
    public static class Params {
        public double par0YTicks = 0.0; // y position of the first parallel encoder (in tick units)
        public double par1YTicks = 1.0; // y position of the second parallel encoder (in tick units)
        public double perpXTicks = 0.0; // x position of the perpendicular encoder (in tick units)
    }

    public static Params PARAMS = new Params();

    // Two parallel wheels (forward motion + heading) and one perpendicular wheel (lateral motion).
    public final Encoder par0, par1, perp;

    public final double inPerTick;

    private int lastPar0Pos, lastPar1Pos, lastPerpPos;
    private boolean initialized; // first update() only seeds the "last" readings

    public ThreeDeadWheelLocalizer(HardwareMap hardwareMap, double inPerTick) {
        // TODO: make sure your config has **motors** with these names (or change them)
        //   the encoders should be plugged into the slot matching the named motor
        //   see https://ftc-docs.firstinspires.org/en/latest/hardware_and_software_configuration/configuring/index.html
        par0 = new OverflowEncoder(new RawEncoder(hardwareMap.get(DcMotorEx.class, "par0")));
        par1 = new OverflowEncoder(new RawEncoder(hardwareMap.get(DcMotorEx.class, "par1")));
        perp = new OverflowEncoder(new RawEncoder(hardwareMap.get(DcMotorEx.class, "perp")));

        // TODO: reverse encoder directions if needed
        //   par0.setDirection(DcMotorSimple.Direction.REVERSE);

        this.inPerTick = inPerTick;

        FlightRecorder.write("THREE_DEAD_WHEEL_PARAMS", PARAMS);
    }

    /**
     * Returns the pose twist (position delta + velocity) since the previous call,
     * derived from the three encoder deltas. Heading comes from the difference of
     * the two parallel wheels; the first call returns a zero twist and just
     * records the initial readings.
     */
    public Twist2dDual<Time> update() {
        PositionVelocityPair par0PosVel = par0.getPositionAndVelocity();
        PositionVelocityPair par1PosVel = par1.getPositionAndVelocity();
        PositionVelocityPair perpPosVel = perp.getPositionAndVelocity();

        FlightRecorder.write("THREE_DEAD_WHEEL_INPUTS", new ThreeDeadWheelInputsMessage(par0PosVel, par1PosVel, perpPosVel));

        if (!initialized) {
            initialized = true;

            lastPar0Pos = par0PosVel.position;
            lastPar1Pos = par1PosVel.position;
            lastPerpPos = perpPosVel.position;

            return new Twist2dDual<>(
                    Vector2dDual.constant(new Vector2d(0.0, 0.0), 2),
                    DualNum.constant(0.0, 2)
            );
        }

        int par0PosDelta = par0PosVel.position - lastPar0Pos;
        int par1PosDelta = par1PosVel.position - lastPar1Pos;
        int perpPosDelta = perpPosVel.position - lastPerpPos;

        // Standard three-wheel odometry: heading from the parallel pair, x from
        // their weighted average, y from the perp wheel corrected for rotation.
        Twist2dDual<Time> twist = new Twist2dDual<>(
                new Vector2dDual<>(
                        new DualNum<Time>(new double[] {
                                (PARAMS.par0YTicks * par1PosDelta - PARAMS.par1YTicks * par0PosDelta) / (PARAMS.par0YTicks - PARAMS.par1YTicks),
                                (PARAMS.par0YTicks * par1PosVel.velocity - PARAMS.par1YTicks * par0PosVel.velocity) / (PARAMS.par0YTicks - PARAMS.par1YTicks),
                        }).times(inPerTick),
                        new DualNum<Time>(new double[] {
                                (PARAMS.perpXTicks / (PARAMS.par0YTicks - PARAMS.par1YTicks) * (par1PosDelta - par0PosDelta) + perpPosDelta),
                                (PARAMS.perpXTicks / (PARAMS.par0YTicks - PARAMS.par1YTicks) * (par1PosVel.velocity - par0PosVel.velocity) + perpPosVel.velocity),
                        }).times(inPerTick)
                ),
                new DualNum<>(new double[] {
                        (par0PosDelta - par1PosDelta) / (PARAMS.par0YTicks - PARAMS.par1YTicks),
                        (par0PosVel.velocity - par1PosVel.velocity) / (PARAMS.par0YTicks - PARAMS.par1YTicks),
                })
        );

        lastPar0Pos = par0PosVel.position;
        lastPar1Pos = par1PosVel.position;
        lastPerpPos = perpPosVel.position;

        return twist;
    }
}

View File

@ -1,121 +0,0 @@
package org.firstinspires.ftc.teamcode;
import com.acmerobotics.dashboard.config.Config;
import com.acmerobotics.roadrunner.DualNum;
import com.acmerobotics.roadrunner.Rotation2d;
import com.acmerobotics.roadrunner.Time;
import com.acmerobotics.roadrunner.Twist2dDual;
import com.acmerobotics.roadrunner.Vector2d;
import com.acmerobotics.roadrunner.Vector2dDual;
import com.acmerobotics.roadrunner.ftc.Encoder;
import com.acmerobotics.roadrunner.ftc.FlightRecorder;
import com.acmerobotics.roadrunner.ftc.OverflowEncoder;
import com.acmerobotics.roadrunner.ftc.PositionVelocityPair;
import com.acmerobotics.roadrunner.ftc.RawEncoder;
import com.qualcomm.robotcore.hardware.DcMotorEx;
import com.qualcomm.robotcore.hardware.DcMotorSimple;
import com.qualcomm.robotcore.hardware.HardwareMap;
import com.qualcomm.robotcore.hardware.IMU;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.robotcore.external.navigation.AngularVelocity;
import org.firstinspires.ftc.robotcore.external.navigation.YawPitchRollAngles;
import org.firstinspires.ftc.teamcode.messages.TwoDeadWheelInputsMessage;
@Config
public final class TwoDeadWheelLocalizer implements Localizer {
    /** Dead-wheel geometry, tunable live via FTC Dashboard. */
    public static class Params {
        public double parYTicks = 0.0; // y position of the parallel encoder (in tick units)
        public double perpXTicks = 0.0; // x position of the perpendicular encoder (in tick units)
    }

    public static Params PARAMS = new Params();

    // One parallel wheel (forward motion) and one perpendicular wheel (lateral motion);
    // heading comes from the IMU rather than a second parallel wheel.
    public final Encoder par, perp;
    public final IMU imu;

    private int lastParPos, lastPerpPos;
    private Rotation2d lastHeading;

    private final double inPerTick;

    // State for unwrapping the IMU's wrapped angular-velocity readings.
    private double lastRawHeadingVel, headingVelOffset;
    private boolean initialized; // first update() only seeds the "last" readings

    public TwoDeadWheelLocalizer(HardwareMap hardwareMap, IMU imu, double inPerTick) {
        // TODO: make sure your config has **motors** with these names (or change them)
        //   the encoders should be plugged into the slot matching the named motor
        //   see https://ftc-docs.firstinspires.org/en/latest/hardware_and_software_configuration/configuring/index.html
        par = new OverflowEncoder(new RawEncoder(hardwareMap.get(DcMotorEx.class, "par")));
        perp = new OverflowEncoder(new RawEncoder(hardwareMap.get(DcMotorEx.class, "perp")));

        // TODO: reverse encoder directions if needed
        //   par.setDirection(DcMotorSimple.Direction.REVERSE);

        this.imu = imu;

        this.inPerTick = inPerTick;

        FlightRecorder.write("TWO_DEAD_WHEEL_PARAMS", PARAMS);
    }

    /**
     * Returns the pose twist (position delta + velocity) since the previous call,
     * combining the two encoder deltas with the IMU heading. The first call
     * returns a zero twist and just records the initial readings.
     */
    public Twist2dDual<Time> update() {
        PositionVelocityPair parPosVel = par.getPositionAndVelocity();
        PositionVelocityPair perpPosVel = perp.getPositionAndVelocity();

        YawPitchRollAngles angles = imu.getRobotYawPitchRollAngles();
        AngularVelocity angularVelocity = imu.getRobotAngularVelocity(AngleUnit.RADIANS);

        FlightRecorder.write("TWO_DEAD_WHEEL_INPUTS", new TwoDeadWheelInputsMessage(parPosVel, perpPosVel, angles, angularVelocity));

        Rotation2d heading = Rotation2d.exp(angles.getYaw(AngleUnit.RADIANS));

        // Unwrap the angular velocity, which the SDK wraps into (-pi, pi]:
        // see https://github.com/FIRST-Tech-Challenge/FtcRobotController/issues/617
        double rawHeadingVel = angularVelocity.zRotationRate;
        if (Math.abs(rawHeadingVel - lastRawHeadingVel) > Math.PI) {
            headingVelOffset -= Math.signum(rawHeadingVel) * 2 * Math.PI;
        }
        lastRawHeadingVel = rawHeadingVel;
        double headingVel = headingVelOffset + rawHeadingVel;

        if (!initialized) {
            initialized = true;

            lastParPos = parPosVel.position;
            lastPerpPos = perpPosVel.position;
            lastHeading = heading;

            return new Twist2dDual<>(
                    Vector2dDual.constant(new Vector2d(0.0, 0.0), 2),
                    DualNum.constant(0.0, 2)
            );
        }

        int parPosDelta = parPosVel.position - lastParPos;
        int perpPosDelta = perpPosVel.position - lastPerpPos;
        double headingDelta = heading.minus(lastHeading);

        // Subtract the rotation-induced component of each wheel's reading so
        // only true translation remains.
        Twist2dDual<Time> twist = new Twist2dDual<>(
                new Vector2dDual<>(
                        new DualNum<Time>(new double[] {
                                parPosDelta - PARAMS.parYTicks * headingDelta,
                                parPosVel.velocity - PARAMS.parYTicks * headingVel,
                        }).times(inPerTick),
                        new DualNum<Time>(new double[] {
                                perpPosDelta - PARAMS.perpXTicks * headingDelta,
                                perpPosVel.velocity - PARAMS.perpXTicks * headingVel,
                        }).times(inPerTick)
                ),
                new DualNum<>(new double[] {
                        headingDelta,
                        headingVel,
                })
        );

        lastParPos = parPosVel.position;
        lastPerpPos = perpPosVel.position;
        lastHeading = heading;

        return twist;
    }
}
}

View File

@ -0,0 +1,7 @@
# Useful Links
## Pedro Pathing
- Pedro Path Generator: `https://pedro-path-generator.vercel.app/`
- Pedro Path Overview: `https://www.youtube.com/watch?v=HI7eyLLpCgM`
- Pedro Tuning Overview: `https://www.youtube.com/watch?v=3EXX5_KwfVM`

View File

@ -0,0 +1,215 @@
package org.firstinspires.ftc.teamcode.cometbots;
import static org.firstinspires.ftc.teamcode.PedroConstants.MAX_POWER;
import com.qualcomm.robotcore.hardware.Gamepad;
import com.qualcomm.robotcore.hardware.HardwareMap;
import org.firstinspires.ftc.teamcode.pedroPathing.follower.Follower;
import org.firstinspires.ftc.teamcode.subsystems.ArmSubsystem;
import org.firstinspires.ftc.teamcode.subsystems.ClawSubsystem;
import org.firstinspires.ftc.teamcode.subsystems.DualMotorSliderSubsystem;
import org.firstinspires.ftc.teamcode.subsystems.HangMotorSubsystem;
import org.firstinspires.ftc.teamcode.subsystems.WristSubsystem;
public class CometBotTeleOpDevelopment {
/*
Subsystems
*/
private DualMotorSliderSubsystem dualSlides;
private ClawSubsystem claw;
private WristSubsystem wrist;
private ArmSubsystem arm;
private HangMotorSubsystem skyhook;
/*
Controllers
*/
public Gamepad gamepad1;
public Gamepad gamepad2;
public Gamepad currentGamepad1;
public Gamepad currentGamepad2;
public Gamepad previousGamepad1;
public Gamepad previousGamepad2;
public boolean wristPickup;
public boolean armParked;
public boolean goClaw;
private Follower follower;
/**
 * Builds all subsystems and the follower from the hardware map and stores the
 * two driver gamepads. The current/previous gamepad copies support
 * rising-edge button detection in update().
 */
public CometBotTeleOpDevelopment(HardwareMap hardwareMap, Gamepad gamepad1, Gamepad gamepad2) {
    dualSlides = new DualMotorSliderSubsystem(hardwareMap);
    claw = new ClawSubsystem(hardwareMap);
    arm = new ArmSubsystem(hardwareMap);
    wrist = new WristSubsystem(hardwareMap);
    skyhook = new HangMotorSubsystem(hardwareMap);
    this.gamepad1 = gamepad1;
    this.gamepad2 = gamepad2;
    // Snapshots used for edge detection; filled in by update() each loop.
    currentGamepad1 = new Gamepad();
    currentGamepad2 = new Gamepad();
    previousGamepad1 = new Gamepad();
    previousGamepad2 = new Gamepad();
    follower = new Follower(hardwareMap);
}
public void init() {
dualSlides.init();
claw.init();
wrist.initTeleOp();
arm.initTeleOp();
skyhook.init();
follower.setMaxPower(MAX_POWER);
follower.startTeleopDrive();
wristPickup = false;
armParked = true;
goClaw = false;
}
public void update() {
previousGamepad1.copy(currentGamepad1);
currentGamepad1.copy(gamepad1);
previousGamepad2.copy(currentGamepad2);
currentGamepad2.copy(gamepad2);
//slides
dualSlides.update();
dualSlidesToLowBucketPosition();
dualSlidesToHighBucketPosition();
dualSlidesToFloorPosition();
//arm
armToParkPosition();
armAndWristToFloor();
armToBucketPosition();
//claw
openClaw();
openThumb();
//hang
hang();
follower.setTeleOpMovementVectors(-gamepad1.left_stick_y, -gamepad1.left_stick_x, -gamepad1.right_stick_x);
follower.update();
}
private void openClaw() {
if (currentGamepad2.right_bumper && !previousGamepad2.right_bumper) {
claw.switchState();
}
}
private void openThumb() {
if (currentGamepad2.left_bumper && !previousGamepad2.left_bumper) {
claw.switchTState();
}
}
private void armAndWristToFloor() {
if (currentGamepad2.a && !previousGamepad2.a && wristPickup) {
if (wrist.getState() != WristSubsystem.WristState.FLOOR) {
wrist.toFloorPositionTeleop();
} else if (wristPickup) {
claw.openClaw();
wrist.toPickupPosition();
}
}
if (currentGamepad2.a && !previousGamepad2.a) {
arm.toFloorPositionTeleOp();
wristPickup = true;
armParked = false;
}
}
private void armToBucketPosition() {
if (currentGamepad2.dpad_up && !previousGamepad2.dpad_up) {
armParked = false;
arm.toBucketPosition();
wrist.toBucketPosition();
wristPickup = false;
}
}
private void armToParkPosition() {
if (currentGamepad2.x && !previousGamepad2.x) {
armParked = true;
arm.toParkPosition();
wrist.toFloorPositionTeleop();
wristPickup = false;
}
}
private void dualSlidesToHighBucketPosition() {
if (currentGamepad2.y && !previousGamepad2.y && armParked) {
dualSlides.toHighBucketPosition();
}
}
private void dualSlidesToFloorPosition() {
if (currentGamepad2.dpad_down && !previousGamepad2.dpad_down && armParked) {
dualSlides.toFloorPosition();
}
}
private void dualSlidesToLowBucketPosition() {
if (currentGamepad2.b && !previousGamepad2.b && armParked) {
dualSlides.toLowBucketPosition();
}
}
private void hang(){
if (gamepad1.a) {
claw.grabBlueberry();
arm.setPosition(0.15);
arm.grabBlueberrySkyhook();
//claw Open small amount
wrist.grabBlueberrySkyhook();
//wrist grab in strange way
}
if(gamepad1.b) {
//confirm grab
claw.closeClaw();
}
if (gamepad1.x && claw.getState() == ClawSubsystem.ClawState.CLOSED) {
//now slap on bar, first wrist, then arm, then claw then driver must drive away
dualSlides.toFixedPosition(200);
dualSlides.update();
}
if (gamepad1.y) {
wrist.hangBlueberrySkyhook();
arm.hangBlueberrySkyhook();
try {
Thread.sleep(1500);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
goClaw = true;
dualSlides.toFixedPosition(2100);
dualSlides.update();
}
if (gamepad1.right_bumper) {
claw.openClaw();
if(goClaw == true) {
try {
Thread.sleep(500);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
dualSlides.toFloorPosition();
dualSlides.update();
arm.toParkPosition();
wrist.toPickupPosition();
}
}
skyhook.setPowerForward(-gamepad2.right_stick_y);
}
}

View File

@ -0,0 +1,331 @@
package org.firstinspires.ftc.teamcode.cometbots.paths;
import androidx.annotation.NonNull;
import com.acmerobotics.dashboard.telemetry.TelemetryPacket;
import com.acmerobotics.roadrunner.Action;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import com.qualcomm.robotcore.util.ElapsedTime;
import org.firstinspires.ftc.teamcode.pedroPathing.follower.Follower;
import org.firstinspires.ftc.teamcode.subsystems.ArmSubsystem;
import org.firstinspires.ftc.teamcode.subsystems.ClawSubsystem;
import org.firstinspires.ftc.teamcode.subsystems.DualMotorSliderSubsystem;
import org.firstinspires.ftc.teamcode.subsystems.WristSubsystem;
@Autonomous(name = "Auto Competition V2", group = "A")
/**
 * Time-gated autonomous state machine: scores a preloaded sample in the high basket,
 * cycles two more samples (paths 2-5), then parks via path 6. Progression is driven by
 * an integer `state` switched every loop, with most transitions additionally gated on
 * wall-clock time from `runtime` (seconds since start) and on slide position.
 */
public class CometBotDriveV2 extends OpMode {
    private Follower follower;
    // Current state-machine step; advanced by the handler methods below. 99 is terminal.
    private int state;
    private HighBasketPath1 path1;
    private HighBasketPath2 path2;
    private HighBasketPath3 path3;
    private HighBasketPath4 path4;
    private HighBasketPath5 path5;
    private HighBasketPath6 path6;
    //private CometBotTeleopCompetition comp;
    // NOTE(review): static mutable state survives across OpMode instantiations on the same
    // app process; runtime is re-created in init() and reset in state 0, which mitigates it,
    // but these fields likely should be instance fields — confirm before changing.
    private static ElapsedTime runtime;
    private static boolean initalized = false;
    private static boolean followingPath = false;
    private DualMotorSliderSubsystem lift;
    private ClawSubsystem claw;
    private WristSubsystem wrist;
    private ArmSubsystem arm;

    /** Builds follower, the six path segments, and all scoring subsystems. */
    @Override
    public void init() {
        follower = new Follower(hardwareMap);
        follower.setMaxPower(.75);
        path1 = new HighBasketPath1();
        path2 = new HighBasketPath2();
        path3 = new HighBasketPath3();
        path4 = new HighBasketPath4();
        path5 = new HighBasketPath5();
        path6 = new HighBasketPath6();
        lift = new DualMotorSliderSubsystem(hardwareMap);
        arm = new ArmSubsystem(hardwareMap);
        wrist = new WristSubsystem(hardwareMap);
        claw = new ClawSubsystem(hardwareMap);
        lift.init();
        arm.initAuto();
        wrist.InitAuto();
        claw.init();
        //comp = new CometBotTeleopCompetition(hardwareMap, telemetry, gamepad1, gamepad2);
        // comp.initCloseClaw();
        runtime = new ElapsedTime();
    }

    /**
     * Main dispatch loop: telemetry, one state handler per tick, then lift + follower updates.
     * Handlers are polled repeatedly until their internal time/position gate passes and they
     * advance `state` themselves (except case 1, which advances unconditionally after one tick).
     */
    public void loop() {
        telemetry.addData("state", state);
        // telemetry.addData("arm", arm);
        // telemetry.addData("lift", lift);
        // telemetry.addData("wrist", wrist);
        // telemetry.addData("claw", claw);
        telemetry.addData("followingPath", followingPath);
        telemetry.addData("busy", follower.isBusy());
        if (runtime != null) {
            telemetry.addData("Runtime (seconds)", runtime.seconds());
        }
        switch (state) {
            case 0:
                // First tick only: zero the clock, start path 1 (handler sets state = 1).
                runtime.reset();
                moveToPathOneAndHighBucket();
                break;
            case 1:
                // Raise lift/wrist for the preload drop; runs exactly one tick.
                doArmThing();
                state = 2;
                break;
            case 2:
                moveToPathTwoAndPickSampleUp();
                break;
            case 3:
                doPickUpThing();
                break;
            case 4:
                moveToBasketPath3();
                break;
            case 5:
                theArmThing();
                break;
            case 6:
                moveToPickupAgainPath4();
                break;
            case 7:
                doPickUpThingAgain();
                break;
            case 8:
                moveToBasketPath5();
                break;
            case 9:
                theArmThingAgain();
                break;
            case 10:
                moveToParkPath6();
                break;
            case 11:
                moveToPark();
                break;
            case 99:
                // Terminal: hold the wrist at floor while parked.
                wrist.toFloorPosition();
                break;
            default:
                telemetry.addLine("default");
        }
        telemetry.update();
        lift.update();
        follower.update();
    }

    /** State 11: after 26 s, run the park path (path 6) and finish in state 99. */
    public void moveToPark() {
        if (runtime.seconds() > 26) {
            path6.moveToBasketPath6(follower);
            state = 99;
        }
    }

    /** State 0: immediately start path 1 toward the basket. */
    private void moveToPathOneAndHighBucket() {
        path1.moveToPath1(follower);
        state = 1;
    }

    /** State 8: after 19 s, drive back to the basket while raising the lift. */
    private void moveToBasketPath5() {
        if(runtime.seconds() > 19){
            path5.moveToBasketPath5(follower);
            lift.toHighBucketReverseDrop();
            wrist.toTravelPosition();
            state = 9;
        }
    }

    /** Road Runner action that sets the state machine to a fixed value (currently unused). */
    public class SetStateAction implements Action {
        private int value;
        public SetStateAction(int value) {
            this.value = value;
        }
        @Override
        public boolean run(@NonNull TelemetryPacket telemetryPacket) {
            state = value;
            return false;
        }
    }

    /** Factory for {@link SetStateAction} (currently unused). */
    private Action setStateValue(int value) {
        return new SetStateAction(value);
    }

    /** State 1: stow thumb, wrist to travel, lift toward the reverse high-bucket drop. */
    private void doArmThing() {
        claw.thumbOutOfTheWay();
        wrist.toTravelPosition();
        lift.toHighBucketReverseDrop();
        //arm.toReverseBucket();
        //wrist.toReverseBucket();
        //claw.openClaw();
        //wrist.toFloorPosition();
    }

    /**
     * State 5: once the lift is high enough (>= 4280 ticks), swing the arm over the back,
     * then time-stagger the wrist (12 s) and claw release (12.75 s) before moving on.
     */
    private void theArmThing() {
        if(lift.getFixedPosition() >= 4280){
            arm.toReverseBucket();
            if(runtime.seconds() > 12) {
                wrist.toReverseBucket();
            }
            if(runtime.seconds() > 12.75) {
                claw.autoOpenClaw();
                state = 6;
            }
        }
    }

    /** State 9: same reverse-drop sequence as {@link #theArmThing()} at the 22 / 23 s marks. */
    private void theArmThingAgain() {
        if(lift.getFixedPosition() >= 4280){
            arm.toReverseBucket();
            if(runtime.seconds() > 22) {
                wrist.toReverseBucket();
            }
            if(runtime.seconds() > 23.00) {
                claw.autoOpenClaw();
                state = 10;
            }
        }
    }

    /**
     * State 2: drop the preload (wrist over at 3.25-4 s, claw open at 3.5 s), then at 4 s
     * stow, lower the lift, and start path 2 toward the first floor sample.
     */
    private void moveToPathTwoAndPickSampleUp() {
        if (runtime.seconds() > 3.25 && runtime.seconds() < 4) {
            wrist.toReverseBucket();
        }
        if (runtime.seconds() > 3.5) {
            claw.autoOpenClaw();
        }
        if (runtime.seconds() > 4) {
            wrist.toTravelPosition();
            lift.toFloorPosition();
            path2.moveToPath2(follower);
            state = 3;
        }
    }
    //
    /** State 4: after 7.25 s, raise the lift and drive path 3 back to the basket. */
    private void moveToBasketPath3() {
        if (runtime.seconds() > 7.25) {
            lift.toHighBucketReverseDrop();
            path3.moveToBasketPath3(follower);
            state = 5;
        }
    }

    /** State 6: at 13 s stow the wrist; at 14 s lower the lift and drive path 4 to pick up. */
    private void moveToPickupAgainPath4() {
        if (runtime.seconds() > 13) {
            wrist.toTravelPosition();
        }
        if (runtime.seconds() > 14) {
            lift.toFloorPosition();
            path4.moveToPickupPath4(follower);
            state = 7;
        }
    }

    /** State 10: at 24.5 s tuck arm/wrist; at 25 s lower lift, close claw, go to state 11. */
    private void moveToParkPath6() {
        if (runtime.seconds() > 24.5) {
            arm.toBucketPosition();
            wrist.toTravelPosition();
        }
        if (runtime.seconds() > 25.) {
            lift.toFloorPosition();
            claw.closeClaw();
            state = 11;
        }
    }
    //
    // private void moveToBasketPath3() {
    // if (!followingPath) {
    // path3.moveToBasketPath3(follower);
    // followingPath = true;
    // }
    // if (runtime != null) {
    // telemetry.addData("Runtime (seconds)", runtime.seconds());
    // if (follower.atParametricEnd() || runtime.seconds() > 10.0) {
    // state = 5;
    // followingPath = false;
    // }
    // }
    // }
    //
    // private void thePickUpAuto() {
    // Actions.runBlocking(new SequentialAction(
    // new SleepAction(.1),
    // comp.claw.openClaw(),
    // new SleepAction(.2),
    // comp.wrist.toPickupPosition(),
    // new SleepAction(.2),
    // comp.arm.toSubmarinePosition(),
    // new SleepAction(.5),
    // comp.claw.closeClaw(),
    // new SleepAction(.3),
    // comp.wrist.toFloorPosition(),
    // new SleepAction(.2),
    // comp.arm.toParkPosition(),
    // new SleepAction(.2)
    // ));
    // }
    // private void thePickUp() {
    // Actions.runBlocking(new SequentialAction(
    // new SleepAction(.5),
    // comp.wrist.toPickupPosition(),
    // new SleepAction(.5),
    // comp.arm.toSubmarinePosition(),
    // new SleepAction(.7),
    // comp.claw.closeClaw(),
    // new SleepAction(.7),
    // comp.wrist.toFloorPosition(),
    // new SleepAction(.5),
    // comp.arm.toParkPosition(),
    // new SleepAction(.5)
    // ));
    // }
    //
    /** State 3: pose wrist/arm for pickup each tick; at 7 s close the claw and advance. */
    private void doPickUpThing() {
        wrist.toPickupPosition();
        arm.toFloorPosition();
        if (runtime.seconds() > 7){
            claw.closeClaw();
            state = 4;
        }
    }

    /** State 7: same pickup pose; at 18 s close the claw and advance to state 8. */
    private void doPickUpThingAgain() {
        wrist.toPickupPosition();
        arm.toFloorPosition();
        if (runtime.seconds() > 18){
            claw.closeClaw();
            state = 8;
        }
    }
}

View File

@ -0,0 +1,366 @@
package org.firstinspires.ftc.teamcode.cometbots.paths;
import static org.firstinspires.ftc.teamcode.PedroConstants.MAX_POWER;
import com.acmerobotics.roadrunner.SequentialAction;
import com.acmerobotics.roadrunner.ftc.Actions;
import com.qualcomm.robotcore.hardware.Gamepad;
import com.qualcomm.robotcore.hardware.HardwareMap;
import org.firstinspires.ftc.robotcore.external.Telemetry;
import org.firstinspires.ftc.teamcode.pedroPathing.follower.Follower;
import org.firstinspires.ftc.teamcode.states.FieldStates;
import org.firstinspires.ftc.teamcode.subsystems.ClawSubsystem;
import org.firstinspires.ftc.teamcode.subsystems.ArmSubsystem;
import org.firstinspires.ftc.teamcode.subsystems.DualMotorSliderSubsystem;
import org.firstinspires.ftc.teamcode.subsystems.MotorsSubsystem;
import org.firstinspires.ftc.teamcode.subsystems.WristSubsystem;
/**
 * Competition TeleOp runner: owns the follower, drive motors, and the claw/arm/wrist/lift
 * subsystems, and maps operator (GP2) button edges to scoring sequences while GP1 drives.
 * Construct during OpMode init, call {@link #init()} once, then {@link #update()} per loop.
 */
public class CometBotTeleopCompetition {
    /*
    Subsystems
    */
    private MotorsSubsystem motors;
    private ClawSubsystem claw;
    private ArmSubsystem arm;
    private WristSubsystem wrist;
    private DualMotorSliderSubsystem lift;
    /*
    Controllers
    */
    // Live gamepads plus current/previous snapshots for rising-edge button detection.
    public Gamepad GP1;
    public Gamepad GP2;
    public Gamepad currentGP1;
    public Gamepad previousGP1;
    public Gamepad currentGP2;
    public Gamepad previousGP2;
    /*
    Pedro/FTC Components
    */
    private Follower follower;
    private Telemetry telemetry;
    /*
    States
    */
    public FieldStates fieldStates;
    /*
    Configurations
    */
    public double currentPower = MAX_POWER;
    /*
    Misc
    */
    // Step counter for the multi-press high-bucket scoring sequence in toHighBucketScore().
    private int hbCounter = 0;

    /**
     * Builds all subsystems and gamepad snapshots.
     *
     * @param hardwareMap hardware map from the enclosing OpMode
     * @param telemetry   OpMode telemetry sink
     * @param gp1         driver gamepad
     * @param gp2         operator gamepad
     */
    public CometBotTeleopCompetition(HardwareMap hardwareMap, Telemetry telemetry, Gamepad gp1, Gamepad gp2) {
        this.motors = new MotorsSubsystem(hardwareMap, telemetry, .55);
        this.GP1 = gp1;
        this.GP2 = gp2;
        this.telemetry = telemetry;
        this.currentGP1 = new Gamepad();
        this.currentGP2 = new Gamepad();
        this.previousGP1 = new Gamepad();
        this.previousGP2 = new Gamepad();
        this.fieldStates = new FieldStates();
        this.follower = new Follower(hardwareMap);
        claw = new ClawSubsystem(hardwareMap);
        arm = new ArmSubsystem(hardwareMap);
        wrist = new WristSubsystem(hardwareMap);
        lift = new DualMotorSliderSubsystem(hardwareMap);
    }

    /** Full initialization: all subsystems, teleop drive, claw closed for match start. */
    public void init() {
        this.motors.init();
        claw.init();
        arm.initTeleOp();
        lift.init();
        // Fix: removed a stray empty-statement `;` that sat on its own line here.
        this.fieldStates.setFieldLocation(FieldStates.FieldLocation.TRAVELING);
        follower.setMaxPower(MAX_POWER);
        follower.startTeleopDrive();
        claw.closeClaw();
        wrist.initTeleOp();
    }

    /** Reduced init used after autonomous: drive only, leaves mechanism servos untouched. */
    public void initCloseClaw(){
        this.motors.init();
        this.fieldStates.setFieldLocation(FieldStates.FieldLocation.TRAVELING);
        follower.setMaxPower(MAX_POWER);
        follower.startTeleopDrive();
    }

    /** Per-loop tick: snapshot gamepads, poll handlers, feed drive vectors, update follower/lift. */
    public void update() {
        this.previousGP1.copy(currentGP1);
        this.currentGP1.copy(this.GP1);
        this.previousGP2.copy(currentGP2);
        this.currentGP2.copy(this.GP2);
        // this.moveSkyHook();
        this.toHighBucketScore();
        this.toLowBucketScore();
        this.toArmParkPosition();
        this.toArmParkThenSwitchBetweenSubmarineAndFloorPosition();
        this.clawControl();
        // this.decreaseMaxPower();
        // this.increaseMaxPower();
        // Actions.runBlocking(this.lift.toFloorPosition());
        // Sticks negated: FTC gamepads report up as negative y.
        follower.setTeleOpMovementVectors(-this.GP1.left_stick_y, -this.GP1.left_stick_x, -this.GP1.right_stick_x);
        follower.update();
        lift.update();
        this.telemetry.addData("Field State", this.fieldStates.getFieldLocation());
        // this.telemetry.addData("Claw State", this.claw.getState());
        // this.telemetry.addData("Claw Position", this.claw.getPosition());
        // this.telemetry.addData("Wrist State", this.wrist.getState());
        // this.telemetry.addData("Arm State", this.arm.getState());
        // this.telemetry.addData("Lift State", this.lift.getState());
        // this.telemetry.addData("Lift Position", this.lift.getPosition());
        this.telemetry.addData("MaxPower", MAX_POWER);
    }

    /*
    Type: PS4 / Logitech
    Controller: 1
    Button: Left Bumper
    Assumption: Working motor mechanism
    Action: Decreases maximum speed by -.05
    */
    // public void decreaseMaxPower() {
    //     if (this.currentGP1.left_bumper && !this.previousGP1.left_bumper) {
    //         this.currentPower = this.currentPower - .05;
    //         this.follower.setMaxPower(this.currentPower);
    //     }
    // }

    /*
    Type: PS4 / Logitech
    Controller: 1
    Button: Left Bumper
    Assumption: Working motor mechanism
    Action: Increases maximum speed by +.05
    */
    // public void increaseMaxPower() {
    //     if (this.currentGP1.right_bumper && !this.previousGP1.right_bumper) {
    //         this.currentPower = this.currentPower + .05;
    //         this.follower.setMaxPower(this.currentPower);
    //     }
    // }

    // public void moveSkyHook() {
    //     if (this.currentGP2.dpad_down) {
    //         hook.moveSkyHook(-1.00);
    //     }
    //     else if (this.currentGP2.dpad_up) {
    //         hook.moveSkyHook(1.00);
    //     }
    //     else{
    //         hook.moveSkyHook(0.00);
    //     }
    //
    // }

    /*
    Type: PS4 / Logitech
    Controller: 2
    Button: TRIANGLE / Y
    Assumption: Claw is holding specimen, robot is facing buckets ready to score
    Action: On button press, enter BUCKET state, arm is lifted up, wrist is lifted up and lift
            raises to high bucket. Once at high bucket position, move arm forward, wrist forward
            and open claw to drop specimen into bucket. Finally, put arm back up and wrist back up,
            retract lift all the way down to floor position and back to TRAVELING state.
    */
    public void toHighBucketScore() {
        if (this.currentGP2.y && !this.previousGP2.y) {
            switch(hbCounter) {
                case 0:
                    lift.toHighBucketPosition();
                    hbCounter = 1;
                    break;
                case 1:
                    wrist.toBucketPosition();
                    arm.toBucketPosition();
                    hbCounter = 2;
                    break;
                case 2:
                    claw.openClaw();
                    hbCounter = 3;
                    break;
                case 3:
                    claw.closeClaw();
                    wrist.toTravelPosition();
                    arm.toParkPosition();
                    hbCounter = 4;
                    break;
                case 4:
                    lift.toFloorPosition();
                    break;
                default:
                    break;
            }
            // NOTE(review): this unconditional reset clobbers the step set by the switch above,
            // so cases 2-4 are unreachable after the first press. It looks like a leftover from
            // an earlier revision; preserved as-is because this is the field-tested behavior.
            hbCounter = 1;
            if (lift.getFixedPosition() >= 4450) {
                arm.toBucketPosition();
                // Fix: the original `if (arm.getPosition() == 0.45);` and
                // `if (wrist.getPosition() == 0.56);` ended in stray semicolons (empty
                // statements), so both calls below always ran. The dead comparisons are
                // removed; runtime behavior is unchanged.
                wrist.toBucketPosition();
                claw.openClaw();
            }
        }
    }

    /** Blocking high-bucket drop sequence (all steps currently commented out — no-op). */
    public void highBucketDrop() {
        Actions.runBlocking(new SequentialAction(
        // new SleepAction(.5),
        // this.lift.toHighBucketPosition(),
        // new SleepAction(.5),
        // this.arm.toBucketPosition(),
        // new SleepAction(.5),
        // this.wrist.toBucketPosition(),
        // new SleepAction(.5),
        // this.claw.openClaw(),
        // new SleepAction(.5),
        // this.wrist.toFloorPosition(),
        // new SleepAction(.5),
        // this.arm.toParkPosition(),
        // this.lift.toFloorPosition(),
        // new SleepAction(.5)
        ));
    }

    /** Faster blocking drop used from autonomous (all steps currently commented out — no-op). */
    public void highBucketDropAuto() {
        Actions.runBlocking(new SequentialAction(
        // new SleepAction(.1),
        // this.lift.toHighBucketPosition(),
        // new SleepAction(.25),
        // this.arm.toBucketPosition(),
        // new SleepAction(.25),
        // this.wrist.toBucketPosition(),
        // new SleepAction(.25),
        // this.claw.openClaw(),
        // new SleepAction(.25),
        // this.wrist.toFloorPosition(),
        // new SleepAction(.25),
        // this.arm.toParkPosition(),
        // this.lift.toZeroPosition(),
        // new SleepAction(.25)
        ));
    }

    /*
    Type: PS4 / Logitech
    Controller: 2
    Button: CIRCLE / B
    Assumption: Claw is holding specimen, robot is facing buckets ready to score
    Action: On button press, enter BUCKET state, arm is lifted up, wrist is lifted up and lift
            raises to low bucket. Once at low bucket position, move arm forward, wrist forward
            and open claw to drop specimen into bucket. Finally, put arm back up and wrist back up,
            retract lift all the way down to floor position and back to TRAVELING state.
    */
    public void toLowBucketScore() {
        if (this.currentGP2.b && !this.previousGP2.b) {
            lift.toLowBucketPosition();
            // Fix: the original `if (lift.getFixedPosition() >= 1700);` ended in a stray
            // semicolon, so the block below always ran as a bare block regardless of lift
            // height. The dead check is removed to match actual behavior; re-introduce a
            // real gate here if position-based sequencing is ever wanted.
            arm.toBucketPosition();
            // NOTE(review): exact double equality on servo positions is fragile — these
            // only pass if getPosition() echoes the exact commanded constant. Confirm.
            if(arm.getPosition() == 0.45)
                wrist.toBucketPosition();
            if(wrist.getPosition() == 0.56)
                claw.openClaw();
        }
    }

    /*
    Type: PS4 / Logitech
    Controller: 2
    Button: RIGHT BUMPER
    Assumption: Working claw mechanism
    Action: On button press, claw switches state from OPEN to CLOSE
    */
    public void clawControl() {
        if (this.currentGP2.right_bumper && !this.previousGP2.right_bumper) {
            claw.switchState();
        }
    }

    /*
    Type: PS4 / Logitech
    Controller: 2
    Button: SQUARE / X
    Assumption: Working arm mechanism
    Action: On button press, pulls arm up and wrist up, ideal for traveling the field when
            holding a specimen in claws
    */
    public void toArmParkPosition() {
        if (this.currentGP2.square && !this.previousGP2.square) {
            // Sequence body is currently commented out — pressing SQUARE is a no-op.
            Actions.runBlocking(new SequentialAction(
            // this.wrist.toFloorPosition(),
            // this.arm.toParkPosition(),
            // this.lift.toFloorPosition()
            ));
        }
    }

    /*
    Type: PS4
    Controller: 2
    Button: CROSS / A
    Assumption: Working claw, arm and wrist mechanisms
    Action: On button press, if arm is in PARK (toArmParkPosition), drop the arm to SUBMARINE
            position. SUBMARINE position means the arm and wrist are parallel to the floor, raised
            3 INCHES off the ground. This state is ideal for moving the arm into the SUBMARINE
            area of the field.
            When arm is in SUBMARINE position, pressing the button again puts the arm and wrist into
            FLOOR state. This angles the arm and wrist down so that it is able to pick specimens
            from within the SUBMARINE floor.
    */
    public void toArmParkThenSwitchBetweenSubmarineAndFloorPosition() {
        if (this.currentGP2.cross && !previousGP2.cross) {
            // Entire toggle body is currently commented out — pressing CROSS is a no-op.
            // if (this.arm.getState() == ArmActionsSubsystem.ArmState.PARK) {
            //     Actions.runBlocking(
            //             new SequentialAction(
            //                     this.arm.toSubmarinePosition(),
            //                     this.wrist.toFloorPosition()
            //             )
            //     );
            // } else if (this.arm.getState() == ArmActionsSubsystem.ArmState.SUBMARINE && this.wrist.getState() == WristActionsSubsystem.WristState.FLOOR) {
            //     Actions.runBlocking(
            //             new SequentialAction(
            ////                     this.lift.toFloorPosition(),
            //                     this.arm.toSubmarinePosition(),
            //                     this.wrist.toPickupPosition()
            //             )
            //     );
            // } else if (this.wrist.getState() == WristActionsSubsystem.WristState.PICKUP) {
            //     Actions.runBlocking(
            //             new SequentialAction(
            //                     // this.arm.toSubmarinePosition(),
            //                     this.wrist.toFloorPosition()
            //             )
            //     );
            // }
        }
    }
}

View File

@ -0,0 +1,26 @@
package org.firstinspires.ftc.teamcode.cometbots.paths;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
@TeleOp(name = "ComeBot Teleop Drive V2", group = "Competition")
@Disabled
/**
 * Thin OpMode shell that delegates all teleop behavior to
 * {@link CometBotTeleopCompetition}: constructed and initialized in init(),
 * ticked once per loop().
 */
public class CometBotTeleopDriveV2 extends OpMode {

    public CometBotTeleopCompetition runMode;

    /** Builds and initializes the competition run mode with this OpMode's hardware and pads. */
    @Override
    public void init() {
        runMode = new CometBotTeleopCompetition(hardwareMap, telemetry, gamepad1, gamepad2);
        runMode.init();
    }

    /** Delegates the loop tick, then flushes telemetry. */
    @Override
    public void loop() {
        runMode.update();
        telemetry.update();
    }
}

View File

@ -0,0 +1,333 @@
package org.firstinspires.ftc.teamcode.cometbots.paths;
import androidx.annotation.NonNull;
import com.acmerobotics.dashboard.telemetry.TelemetryPacket;
import com.acmerobotics.roadrunner.Action;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import com.qualcomm.robotcore.util.ElapsedTime;
import org.firstinspires.ftc.teamcode.pedroPathing.follower.Follower;
import org.firstinspires.ftc.teamcode.subsystems.ArmSubsystem;
import org.firstinspires.ftc.teamcode.subsystems.ClawSubsystem;
import org.firstinspires.ftc.teamcode.subsystems.DualMotorSliderSubsystem;
import org.firstinspires.ftc.teamcode.subsystems.WristSubsystem;
@Autonomous(name = "Copy of Auto Competition V2", group = "A")
/**
 * Variant of the V2 autonomous state machine that parks via {@link HighBasketPath6b}
 * (a two-segment line path) instead of the original curved HighBasketPath6 — the only
 * difference from CometBotDriveV2. Progression is driven by an integer `state` switched
 * every loop, with transitions gated on wall-clock time and slide position.
 */
public class CopyofCometBotDriveV2 extends OpMode {
    private Follower follower;
    // Current state-machine step; advanced by the handler methods below. 99 is terminal.
    private int state;
    private HighBasketPath1 path1;
    private HighBasketPath2 path2;
    private HighBasketPath3 path3;
    private HighBasketPath4 path4;
    private HighBasketPath5 path5;
    //private HighBasketPath6 path6;
    private HighBasketPath6b path6;
    //private CometBotTeleopCompetition comp;
    // NOTE(review): static mutable state survives across OpMode instantiations on the same
    // app process; runtime is re-created in init() and reset in state 0, which mitigates it,
    // but these fields likely should be instance fields — confirm before changing.
    private static ElapsedTime runtime;
    private static boolean initalized = false;
    private static boolean followingPath = false;
    private DualMotorSliderSubsystem lift;
    private ClawSubsystem claw;
    private WristSubsystem wrist;
    private ArmSubsystem arm;

    /** Builds follower, the path segments (6b for parking), and all scoring subsystems. */
    @Override
    public void init() {
        follower = new Follower(hardwareMap);
        follower.setMaxPower(.75);
        path1 = new HighBasketPath1();
        path2 = new HighBasketPath2();
        path3 = new HighBasketPath3();
        path4 = new HighBasketPath4();
        path5 = new HighBasketPath5();
        path6 = new HighBasketPath6b();
        //path6 = new HighBasketPath6();
        lift = new DualMotorSliderSubsystem(hardwareMap);
        arm = new ArmSubsystem(hardwareMap);
        wrist = new WristSubsystem(hardwareMap);
        claw = new ClawSubsystem(hardwareMap);
        lift.init();
        arm.initAuto();
        wrist.InitAuto();
        claw.init();
        //comp = new CometBotTeleopCompetition(hardwareMap, telemetry, gamepad1, gamepad2);
        // comp.initCloseClaw();
        runtime = new ElapsedTime();
    }

    /**
     * Main dispatch loop: telemetry, one state handler per tick, then lift + follower updates.
     * Handlers are polled repeatedly until their internal time/position gate passes and they
     * advance `state` themselves (except case 1, which advances unconditionally after one tick).
     */
    public void loop() {
        telemetry.addData("state", state);
        // telemetry.addData("arm", arm);
        // telemetry.addData("lift", lift);
        // telemetry.addData("wrist", wrist);
        // telemetry.addData("claw", claw);
        telemetry.addData("followingPath", followingPath);
        telemetry.addData("busy", follower.isBusy());
        if (runtime != null) {
            telemetry.addData("Runtime (seconds)", runtime.seconds());
        }
        switch (state) {
            case 0:
                // First tick only: zero the clock, start path 1 (handler sets state = 1).
                runtime.reset();
                moveToPathOneAndHighBucket();
                break;
            case 1:
                // Raise lift/wrist for the preload drop; runs exactly one tick.
                doArmThing();
                state = 2;
                break;
            case 2:
                moveToPathTwoAndPickSampleUp();
                break;
            case 3:
                doPickUpThing();
                break;
            case 4:
                moveToBasketPath3();
                break;
            case 5:
                theArmThing();
                break;
            case 6:
                moveToPickupAgainPath4();
                break;
            case 7:
                doPickUpThingAgain();
                break;
            case 8:
                moveToBasketPath5();
                break;
            case 9:
                theArmThingAgain();
                break;
            case 10:
                moveToParkPath6();
                break;
            case 11:
                moveToPark();
                break;
            case 99:
                // Terminal: hold the wrist at floor while parked.
                wrist.toFloorPosition();
                break;
            default:
                telemetry.addLine("default");
        }
        telemetry.update();
        lift.update();
        follower.update();
    }

    /** State 11: after 26 s, run the 6b park path and finish in state 99. */
    public void moveToPark() {
        if (runtime.seconds() > 26) {
            path6.moveToBasketPath6(follower);
            state = 99;
        }
    }

    /** State 0: immediately start path 1 toward the basket. */
    private void moveToPathOneAndHighBucket() {
        path1.moveToPath1(follower);
        state = 1;
    }

    /** State 8: after 19 s, drive back to the basket while raising the lift. */
    private void moveToBasketPath5() {
        if(runtime.seconds() > 19){
            path5.moveToBasketPath5(follower);
            lift.toHighBucketReverseDrop();
            wrist.toTravelPosition();
            state = 9;
        }
    }

    /** Road Runner action that sets the state machine to a fixed value (currently unused). */
    public class SetStateAction implements Action {
        private int value;
        public SetStateAction(int value) {
            this.value = value;
        }
        @Override
        public boolean run(@NonNull TelemetryPacket telemetryPacket) {
            state = value;
            return false;
        }
    }

    /** Factory for {@link SetStateAction} (currently unused). */
    private Action setStateValue(int value) {
        return new SetStateAction(value);
    }

    /** State 1: stow thumb, wrist to travel, lift toward the reverse high-bucket drop. */
    private void doArmThing() {
        claw.thumbOutOfTheWay();
        wrist.toTravelPosition();
        lift.toHighBucketReverseDrop();
        //arm.toReverseBucket();
        //wrist.toReverseBucket();
        //claw.openClaw();
        //wrist.toFloorPosition();
    }

    /**
     * State 5: once the lift is high enough (>= 4280 ticks), swing the arm over the back,
     * then time-stagger the wrist (12 s) and claw release (12.75 s) before moving on.
     */
    private void theArmThing() {
        if(lift.getFixedPosition() >= 4280){
            arm.toReverseBucket();
            if(runtime.seconds() > 12) {
                wrist.toReverseBucket();
            }
            if(runtime.seconds() > 12.75) {
                claw.autoOpenClaw();
                state = 6;
            }
        }
    }

    /** State 9: same reverse-drop sequence as {@link #theArmThing()} at the 22 / 23 s marks. */
    private void theArmThingAgain() {
        if(lift.getFixedPosition() >= 4280){
            arm.toReverseBucket();
            if(runtime.seconds() > 22) {
                wrist.toReverseBucket();
            }
            if(runtime.seconds() > 23.00) {
                claw.autoOpenClaw();
                state = 10;
            }
        }
    }

    /**
     * State 2: drop the preload (wrist over at 3.25-4 s, claw open at 3.5 s), then at 4 s
     * stow, lower the lift, and start path 2 toward the first floor sample.
     */
    private void moveToPathTwoAndPickSampleUp() {
        if (runtime.seconds() > 3.25 && runtime.seconds() < 4) {
            wrist.toReverseBucket();
        }
        if (runtime.seconds() > 3.5) {
            claw.autoOpenClaw();
        }
        if (runtime.seconds() > 4) {
            wrist.toTravelPosition();
            lift.toFloorPosition();
            path2.moveToPath2(follower);
            state = 3;
        }
    }
    //
    /** State 4: after 7.25 s, raise the lift and drive path 3 back to the basket. */
    private void moveToBasketPath3() {
        if (runtime.seconds() > 7.25) {
            lift.toHighBucketReverseDrop();
            path3.moveToBasketPath3(follower);
            state = 5;
        }
    }

    /** State 6: at 13 s stow the wrist; at 14 s lower the lift and drive path 4 to pick up. */
    private void moveToPickupAgainPath4() {
        if (runtime.seconds() > 13) {
            wrist.toTravelPosition();
        }
        if (runtime.seconds() > 14) {
            lift.toFloorPosition();
            path4.moveToPickupPath4(follower);
            state = 7;
        }
    }

    /** State 10: at 24.5 s tuck arm/wrist; at 25 s lower lift, close claw, go to state 11. */
    private void moveToParkPath6() {
        if (runtime.seconds() > 24.5) {
            arm.toBucketPosition();
            wrist.toTravelPosition();
        }
        if (runtime.seconds() > 25.) {
            lift.toFloorPosition();
            claw.closeClaw();
            state = 11;
        }
    }
    //
    // private void moveToBasketPath3() {
    // if (!followingPath) {
    // path3.moveToBasketPath3(follower);
    // followingPath = true;
    // }
    // if (runtime != null) {
    // telemetry.addData("Runtime (seconds)", runtime.seconds());
    // if (follower.atParametricEnd() || runtime.seconds() > 10.0) {
    // state = 5;
    // followingPath = false;
    // }
    // }
    // }
    //
    // private void thePickUpAuto() {
    // Actions.runBlocking(new SequentialAction(
    // new SleepAction(.1),
    // comp.claw.openClaw(),
    // new SleepAction(.2),
    // comp.wrist.toPickupPosition(),
    // new SleepAction(.2),
    // comp.arm.toSubmarinePosition(),
    // new SleepAction(.5),
    // comp.claw.closeClaw(),
    // new SleepAction(.3),
    // comp.wrist.toFloorPosition(),
    // new SleepAction(.2),
    // comp.arm.toParkPosition(),
    // new SleepAction(.2)
    // ));
    // }
    // private void thePickUp() {
    // Actions.runBlocking(new SequentialAction(
    // new SleepAction(.5),
    // comp.wrist.toPickupPosition(),
    // new SleepAction(.5),
    // comp.arm.toSubmarinePosition(),
    // new SleepAction(.7),
    // comp.claw.closeClaw(),
    // new SleepAction(.7),
    // comp.wrist.toFloorPosition(),
    // new SleepAction(.5),
    // comp.arm.toParkPosition(),
    // new SleepAction(.5)
    // ));
    // }
    //
    /** State 3: pose wrist/arm for pickup each tick; at 7 s close the claw and advance. */
    private void doPickUpThing() {
        wrist.toPickupPosition();
        arm.toFloorPosition();
        if (runtime.seconds() > 7){
            claw.closeClaw();
            state = 4;
        }
    }

    /** State 7: same pickup pose; at 18 s close the claw and advance to state 8. */
    private void doPickUpThingAgain() {
        wrist.toPickupPosition();
        arm.toFloorPosition();
        if (runtime.seconds() > 18){
            claw.closeClaw();
            state = 8;
        }
    }
}

View File

@ -0,0 +1,39 @@
package org.firstinspires.ftc.teamcode.cometbots.paths;
import org.firstinspires.ftc.teamcode.pedroPathing.follower.Follower;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierCurve;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierLine;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathBuilder;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathChain;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Point;
/*
AutoLine# - This file does something of a path......
*/
/**
 * Path segment 1: from the autonomous start pose (10, 89) to the high-basket
 * scoring position (20, 130), rotating the heading from 0° to 320°.
 */
public class HighBasketPath1 {

    // Field pose the robot is placed at before autonomous begins.
    private final Pose startPose = new Pose(10, 89);

    /**
     * Seeds the follower's starting pose and begins following the segment.
     *
     * @param robot the active Pedro Pathing follower
     */
    public void moveToPath1(Follower robot) {
        robot.setStartingPose(startPose);
        PathChain chain = new PathBuilder()
                .addPath(
                        // Line 1: start tile -> basket corner
                        new BezierLine(
                                new Point(10.000, 89.000, Point.CARTESIAN),
                                new Point(20.000, 130.000, Point.CARTESIAN)))
                .setLinearHeadingInterpolation(Math.toRadians(0), Math.toRadians(320))
                .build();
        robot.followPath(chain);
    }
}

View File

@ -0,0 +1,36 @@
package org.firstinspires.ftc.teamcode.cometbots.paths;
import org.firstinspires.ftc.teamcode.pedroPathing.follower.Follower;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierLine;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathBuilder;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathChain;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Point;
/*
AutoLine# - This file does something of a path......
*/
/**
 * Path segment 2: from the basket drop position (20, 130) to the first floor
 * sample pickup (27, 117), holding a 0° heading throughout.
 */
public class HighBasketPath2 {

    /**
     * Builds the single-line chain and hands it to the follower.
     *
     * @param robot the active Pedro Pathing follower
     */
    public void moveToPath2(Follower robot) {
        PathBuilder pathBuilder = new PathBuilder();
        pathBuilder.addPath(
                        // Line 1: basket -> first pickup spot
                        new BezierLine(
                                new Point(20.000, 130.000, Point.CARTESIAN),
                                new Point(27.000, 117.000, Point.CARTESIAN)))
                .setLinearHeadingInterpolation(Math.toRadians(0), Math.toRadians(0));
        PathChain chain = pathBuilder.build();
        robot.followPath(chain);
    }
}

View File

@ -0,0 +1,35 @@
package org.firstinspires.ftc.teamcode.cometbots.paths;
import org.firstinspires.ftc.teamcode.pedroPathing.follower.Follower;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierLine;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathBuilder;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathChain;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Point;
/*
AutoLine# - This file does something of a path......
*/
/**
 * Path segment 3: returns from the first pickup toward the basket (22, 132),
 * swinging the heading from 0° to 320° for a reverse drop.
 * NOTE(review): starts at x=26 while path 2 ends at x=27 — presumably an
 * intentional tweak, but confirm against the path generator.
 */
public class HighBasketPath3 {

    /**
     * Builds the single-line return chain and hands it to the follower.
     *
     * @param robot the active Pedro Pathing follower
     */
    public void moveToBasketPath3(Follower robot) {
        PathBuilder pathBuilder = new PathBuilder();
        pathBuilder.addPath(
                        // Line 1: pickup spot -> basket
                        new BezierLine(
                                new Point(26.000, 117.000, Point.CARTESIAN),
                                new Point(22.000, 132.000, Point.CARTESIAN)))
                .setLinearHeadingInterpolation(Math.toRadians(0), Math.toRadians(320));
        PathChain chain = pathBuilder.build();
        robot.followPath(chain);
    }
}

View File

@ -0,0 +1,35 @@
package org.firstinspires.ftc.teamcode.cometbots.paths;
import org.firstinspires.ftc.teamcode.pedroPathing.follower.Follower;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierLine;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathBuilder;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathChain;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Point;
/*
AutoLine# - This file does something of a path......
*/
/**
 * Path segment 4: from the basket (22, 132) to the second floor sample pickup
 * (27, 128.5), holding a 0° heading throughout.
 */
public class HighBasketPath4 {

    /**
     * Builds the single-line chain and hands it to the follower.
     *
     * @param robot the active Pedro Pathing follower
     */
    public void moveToPickupPath4(Follower robot) {
        PathBuilder pathBuilder = new PathBuilder();
        pathBuilder.addPath(
                        // Line 1: basket -> second pickup spot
                        new BezierLine(
                                new Point(22.000, 132.000, Point.CARTESIAN),
                                new Point(27.000, 128.500, Point.CARTESIAN)))
                .setLinearHeadingInterpolation(Math.toRadians(0), Math.toRadians(0));
        PathChain chain = pathBuilder.build();
        robot.followPath(chain);
    }
}

View File

@ -0,0 +1,35 @@
package org.firstinspires.ftc.teamcode.cometbots.paths;
import org.firstinspires.ftc.teamcode.pedroPathing.follower.Follower;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierLine;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathBuilder;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathChain;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Point;
/*
AutoLine# - This file does something of a path......
*/
/**
 * Path segment 5: returns from the second pickup toward the basket (22, 132),
 * swinging the heading from 0° to 320° for the second reverse drop.
 * NOTE(review): starts at (27, 127) while path 4 ends at (27, 128.5) —
 * presumably an intentional tweak, but confirm against the path generator.
 */
public class HighBasketPath5 {

    /**
     * Builds the single-line return chain and hands it to the follower.
     *
     * @param robot the active Pedro Pathing follower
     */
    public void moveToBasketPath5(Follower robot) {
        PathBuilder pathBuilder = new PathBuilder();
        pathBuilder.addPath(
                        // Line 1: pickup spot -> basket
                        new BezierLine(
                                new Point(27.000, 127.000, Point.CARTESIAN),
                                new Point(22.000, 132.000, Point.CARTESIAN)))
                .setLinearHeadingInterpolation(Math.toRadians(0), Math.toRadians(320));
        PathChain chain = pathBuilder.build();
        robot.followPath(chain);
    }
}

View File

@ -0,0 +1,37 @@
package org.firstinspires.ftc.teamcode.cometbots.paths;
import org.firstinspires.ftc.teamcode.pedroPathing.follower.Follower;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierCurve;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierLine;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathBuilder;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathChain;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Point;
/**
 * HighBasketPath6 - sweeps the robot away from the basket area (27, 128)
 * toward (100, 118) along a single three-point Bezier curve, rotating the
 * heading from 0 to 270 degrees along the segment.
 */
public class HighBasketPath6 {

    /**
     * Builds the curved travel path and commands the follower to execute it.
     *
     * @param robot the Pedro Pathing follower that will run the path
     */
    public void moveToBasketPath6(Follower robot) {
        PathChain path = new PathBuilder()
                .addPath(
                        // Curved segment: (27, 128) pulled through control
                        // point (80, 130) toward the endpoint (100, 118).
                        new BezierCurve(
                                new Point(27.000, 128.000, Point.CARTESIAN),
                                new Point(80.000, 130.000, Point.CARTESIAN),
                                new Point(100.000, 118.000, Point.CARTESIAN)
                        )
                )
                .setLinearHeadingInterpolation(Math.toRadians(0), Math.toRadians(270))
                .build();
        robot.followPath(path);
    }
}

View File

@ -0,0 +1,44 @@
package org.firstinspires.ftc.teamcode.cometbots.paths;
import org.firstinspires.ftc.teamcode.pedroPathing.follower.Follower;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierCurve;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.BezierLine;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathBuilder;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.PathChain;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Point;
/**
 * HighBasketPath6b - two-segment path out of the basket area: a long straight
 * run from (27, 128) to (92.448, 125.671) rotating 315 -> 270 degrees, then a
 * straight drop to (93, 94) following the segment's tangent heading.
 *
 * NOTE(review): despite the "6b" class name, the method keeps the name
 * moveToBasketPath6 (matching HighBasketPath6) for caller compatibility.
 */
public class HighBasketPath6b {

    /**
     * Builds the two-segment path and commands the follower to execute it.
     *
     * @param robot the Pedro Pathing follower that will run the path
     */
    public void moveToBasketPath6(Follower robot) {
        PathChain path = new PathBuilder()
                .addPath(
                        // Segment 1: long straight run across the field.
                        new BezierLine(
                                new Point(27.000, 128.000, Point.CARTESIAN),
                                new Point(92.448, 125.671, Point.CARTESIAN)
                        )
                )
                .setLinearHeadingInterpolation(Math.toRadians(315), Math.toRadians(270))
                .addPath(
                        // Segment 2: straight drop toward (93, 94).
                        new BezierLine(
                                new Point(92.448, 125.671, Point.CARTESIAN),
                                new Point(93.000, 94.000, Point.CARTESIAN)
                        )
                )
                // Heading follows the direction of travel on the second segment.
                .setTangentHeadingInterpolation()
                .build();
        robot.followPath(path);
    }
}

View File

@ -0,0 +1,197 @@
/* Copyright (c) 2021 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.teamcode.cometbots.projects;
import static org.firstinspires.ftc.teamcode.PedroConstants.BACK_ENCODER;
import static org.firstinspires.ftc.teamcode.PedroConstants.BACK_ENCODER_DIRECTION;
import static org.firstinspires.ftc.teamcode.PedroConstants.BACK_LEFT_MOTOR;
import static org.firstinspires.ftc.teamcode.PedroConstants.BACK_LEFT_MOTOR_DIRECTION;
import static org.firstinspires.ftc.teamcode.PedroConstants.BACK_RIGHT_MOTOR;
import static org.firstinspires.ftc.teamcode.PedroConstants.BACK_RIGHT_MOTOR_DIRECTION;
import static org.firstinspires.ftc.teamcode.PedroConstants.FRONT_LEFT_MOTOR;
import static org.firstinspires.ftc.teamcode.PedroConstants.FRONT_LEFT_MOTOR_DIRECTION;
import static org.firstinspires.ftc.teamcode.PedroConstants.FRONT_RIGHT_MOTOR;
import static org.firstinspires.ftc.teamcode.PedroConstants.FRONT_RIGHT_MOTOR_DIRECTION;
import static org.firstinspires.ftc.teamcode.PedroConstants.LEFT_ENCODER;
import static org.firstinspires.ftc.teamcode.PedroConstants.LEFT_ENCODER_DIRECTION;
import static org.firstinspires.ftc.teamcode.PedroConstants.RIGHT_ENCODER;
import static org.firstinspires.ftc.teamcode.PedroConstants.RIGHT_ENCODER_DIRECTION;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.hardware.DcMotorEx;
import com.qualcomm.robotcore.util.ElapsedTime;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Encoder;
/*
* This file contains an example of a Linear "OpMode".
* An OpMode is a 'program' that runs in either the autonomous or the teleop period of an FTC match.
* The names of OpModes appear on the menu of the FTC Driver Station.
* When a selection is made from the menu, the corresponding OpMode is executed.
*
* This particular OpMode illustrates driving a 4-motor Omni-Directional (or Holonomic) robot.
* This code will work with either a Mecanum-Drive or an X-Drive train.
* Both of these drives are illustrated at https://gm0.org/en/latest/docs/robot-design/drivetrains/holonomic.html
* Note that a Mecanum drive must display an X roller-pattern when viewed from above.
*
* Also note that it is critical to set the correct rotation direction for each motor. See details below.
*
* Holonomic drives provide the ability for the robot to move in three axes (directions) simultaneously.
* Each motion axis is controlled by one Joystick axis.
*
* 1) Axial: Driving forward and backward Left-joystick Forward/Backward
* 2) Lateral: Strafing right and left Left-joystick Right and Left
* 3) Yaw: Rotating Clockwise and counter clockwise Right-joystick Right and Left
*
* This code is written assuming that the right-side motors need to be reversed for the robot to drive forward.
* When you first test your robot, if it moves backward when you push the left stick forward, then you must flip
* the direction of all 4 motors (see code below).
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list
*/
@TeleOp(name="Basic: Omni Linear OpMode", group="Linear OpMode")
public class BasicOmniOpMode_Linear extends LinearOpMode {

    // Declare OpMode members for each of the 4 motors.
    private final ElapsedTime runtime = new ElapsedTime();

    /**
     * Mecanum/omni POV drive loop with odometry-encoder telemetry.
     * Left stick: forward/strafe; right stick: rotate. Wheel powers are
     * normalized so no motor command exceeds 1.0.
     */
    @Override
    public void runOpMode() {

        // Initialize the hardware variables. Note that the strings used here must correspond
        // to the names assigned during the robot configuration step on the DS or RC devices.
        DcMotor leftFrontDrive = hardwareMap.get(DcMotor.class, FRONT_LEFT_MOTOR);
        DcMotor leftBackDrive = hardwareMap.get(DcMotor.class, BACK_LEFT_MOTOR);
        DcMotor rightFrontDrive = hardwareMap.get(DcMotor.class, FRONT_RIGHT_MOTOR);
        DcMotor rightBackDrive = hardwareMap.get(DcMotor.class, BACK_RIGHT_MOTOR);

        // Dead-wheel odometry encoders, read here for telemetry/debugging only;
        // they do not affect the drive commands below.
        // TODO: replace these with your encoder ports
        Encoder leftEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, LEFT_ENCODER));
        Encoder rightEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, RIGHT_ENCODER));
        Encoder strafeEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, BACK_ENCODER));

        // TODO: reverse any encoders necessary
        leftEncoder.setDirection(LEFT_ENCODER_DIRECTION);
        rightEncoder.setDirection(RIGHT_ENCODER_DIRECTION);
        strafeEncoder.setDirection(BACK_ENCODER_DIRECTION);

        // ########################################################################################
        // !!!            IMPORTANT Drive Information. Test your motor directions.            !!!!!
        // ########################################################################################
        // Most robots need the motors on one side to be reversed to drive forward.
        // The motor reversals shown here are for a "direct drive" robot (the wheels turn the same direction as the motor shaft)
        // If your robot has additional gear reductions or uses a right-angled drive, it's important to ensure
        // that your motors are turning in the correct direction.  So, start out with the reversals here, BUT
        // when you first test your robot, push the left joystick forward and observe the direction the wheels turn.
        // Reverse the direction (flip FORWARD <-> REVERSE ) of any wheel that runs backward
        // Keep testing until ALL the wheels move the robot forward when you push the left joystick forward.
        leftFrontDrive.setDirection(FRONT_LEFT_MOTOR_DIRECTION);
        leftBackDrive.setDirection(BACK_LEFT_MOTOR_DIRECTION);
        rightFrontDrive.setDirection(FRONT_RIGHT_MOTOR_DIRECTION);
        rightBackDrive.setDirection(BACK_RIGHT_MOTOR_DIRECTION);

        // Wait for the game to start (driver presses START)
        telemetry.addData("Status", "Initialized");
        telemetry.addData("Left Encoder Value", leftEncoder.getDeltaPosition());
        telemetry.addData("Right Encoder Value", rightEncoder.getDeltaPosition());
        telemetry.addData("Strafe Encoder Value", strafeEncoder.getDeltaPosition());
        telemetry.update();

        waitForStart();
        runtime.reset();

        // run until the end of the match (driver presses STOP)
        while (opModeIsActive()) {
            double max;

            // POV Mode uses left joystick to go forward & strafe, and right joystick to rotate.
            double axial = -gamepad1.left_stick_y;  // Note: pushing stick forward gives negative value
            double lateral = gamepad1.left_stick_x;
            double yaw = gamepad1.right_stick_x;

            // Combine the joystick requests for each axis-motion to determine each wheel's power.
            // Set up a variable for each drive wheel to save the power level for telemetry.
            double leftFrontPower = axial + lateral + yaw;
            double rightFrontPower = axial - lateral - yaw;
            double leftBackPower = axial - lateral + yaw;
            double rightBackPower = axial + lateral - yaw;

            // Normalize the values so no wheel power exceeds 100%
            // This ensures that the robot maintains the desired motion.
            max = Math.max(Math.abs(leftFrontPower), Math.abs(rightFrontPower));
            max = Math.max(max, Math.abs(leftBackPower));
            max = Math.max(max, Math.abs(rightBackPower));

            if (max > 1.0) {
                leftFrontPower /= max;
                rightFrontPower /= max;
                leftBackPower /= max;
                rightBackPower /= max;
            }

            // This is test code:
            //
            // Uncomment the following code to test your motor directions.
            // Each button should make the corresponding motor run FORWARD.
            //   1) First get all the motors to take to correct positions on the robot
            //      by adjusting your Robot Configuration if necessary.
            //   2) Then make sure they run in the correct direction by modifying the
            //      the setDirection() calls above.
            // Once the correct motors move in the correct direction re-comment this code.
            /*
            leftFrontPower  = gamepad1.x ? 1.0 : 0.0;  // X gamepad
            leftBackPower   = gamepad1.a ? 1.0 : 0.0;  // A gamepad
            rightFrontPower = gamepad1.y ? 1.0 : 0.0;  // Y gamepad
            rightBackPower  = gamepad1.b ? 1.0 : 0.0;  // B gamepad
            */

            // Send calculated power to wheels
            leftFrontDrive.setPower(leftFrontPower);
            rightFrontDrive.setPower(rightFrontPower);
            leftBackDrive.setPower(leftBackPower);
            rightBackDrive.setPower(rightBackPower);

            // Show the elapsed game time and wheel power.
            telemetry.addData("Status", "Run Time: " + runtime.toString());
            telemetry.addData("Front left/Right", "%4.2f, %4.2f", leftFrontPower, rightFrontPower);
            telemetry.addData("Back  left/Right", "%4.2f, %4.2f", leftBackPower, rightBackPower);
            telemetry.addData("Left Encoder Value", leftEncoder.getDeltaPosition());
            telemetry.addData("Right Encoder Value", rightEncoder.getDeltaPosition());
            telemetry.addData("Strafe Encoder Value", strafeEncoder.getDeltaPosition());
            telemetry.update();
        }
    }
}

View File

@ -0,0 +1,95 @@
/* Copyright (c) 2021 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.teamcode.cometbots.tests;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.Gamepad;
import com.qualcomm.robotcore.util.ElapsedTime;
import org.firstinspires.ftc.teamcode.subsystems.ArmSubsystem;
@TeleOp(name = "Arm Test v2", group = "Debug")
public class ArmTest extends LinearOpMode {

    /** Elapsed time since START, shown in telemetry. */
    private final ElapsedTime runtime = new ElapsedTime();

    /**
     * Debug OpMode for exercising the arm subsystem.
     * Controls: circle -> park position, square -> bucket position,
     * cross -> floor position, bumpers -> nudge position by +/- 0.05.
     */
    @Override
    public void runOpMode() {

        // Arm under test.
        ArmSubsystem arm = new ArmSubsystem(hardwareMap);

        // Gamepad snapshots used for rising-edge detection, so a held
        // button triggers an action exactly once.
        Gamepad gamepadNow = new Gamepad();
        Gamepad gamepadPrev = new Gamepad();

        arm.initTeleOp();
        waitForStart();
        runtime.reset();

        // Poll the gamepad until the driver presses STOP.
        while (opModeIsActive()) {
            gamepadPrev.copy(gamepadNow);
            gamepadNow.copy(gamepad1);

            if (gamepadNow.circle && !gamepadPrev.circle) {
                arm.toParkPosition();
            }
            if (gamepadNow.square && !gamepadPrev.square) {
                arm.toBucketPosition();
            }
            if (gamepadNow.cross && !gamepadPrev.cross) {
                arm.toFloorPosition();
            }
            if (gamepadNow.left_bumper && !gamepadPrev.left_bumper) {
                arm.setPosition(arm.getPosition() - 0.05);
            }
            if (gamepadNow.right_bumper && !gamepadPrev.right_bumper) {
                arm.setPosition(arm.getPosition() + 0.05);
            }

            telemetry.addData("Status", "Run Time: " + runtime.toString());
            telemetry.addData("Arm State", arm.getState());
            telemetry.addData("Arm Position", arm.getPosition());
            telemetry.update();
        }
    }
}

View File

@ -0,0 +1,85 @@
/* Copyright (c) 2021 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.teamcode.cometbots.tests;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.Gamepad;
import com.qualcomm.robotcore.util.ElapsedTime;
import org.firstinspires.ftc.teamcode.subsystems.ClawSubsystem;
@TeleOp(name = "Claw Test v2", group = "Debug")
public class ClawTest extends LinearOpMode {

    /** Elapsed time since START, shown in telemetry. */
    private final ElapsedTime runtime = new ElapsedTime();

    /**
     * Debug OpMode for exercising the claw subsystem.
     * Controls: cross toggles the main claw state, triangle toggles the
     * thumb state. Both use rising-edge detection so a held button
     * triggers exactly once.
     */
    @Override
    public void runOpMode() {

        // Claw under test.
        ClawSubsystem claw = new ClawSubsystem(hardwareMap);

        // Gamepad snapshots used for rising-edge detection.
        Gamepad currentGamepad1 = new Gamepad();
        Gamepad previousGamepad1 = new Gamepad();

        // FIX: initialize the claw during the INIT phase, before waitForStart(),
        // consistent with ArmTest/WristTest which call initTeleOp() before start.
        // Previously init() ran only after START, leaving the claw in an
        // undefined position throughout the init phase.
        claw.init();

        waitForStart();
        runtime.reset();

        // run until the end of the match (driver presses STOP)
        while (opModeIsActive()) {
            previousGamepad1.copy(currentGamepad1);
            currentGamepad1.copy(gamepad1);

            if (currentGamepad1.cross && !previousGamepad1.cross) {
                claw.switchState();
            }
            if (currentGamepad1.triangle && !previousGamepad1.triangle) {
                claw.switchTState();
            }

            // Show the elapsed game time and claw states.
            telemetry.addData("Status", "Run Time: " + runtime.toString());
            telemetry.addData("Claw State", claw.getState());
            telemetry.addData("Claw Thumb State", claw.getTState());
            telemetry.update();
        }
    }
}

View File

@ -0,0 +1,72 @@
package org.firstinspires.ftc.teamcode.cometbots.tests;
import static org.firstinspires.ftc.teamcode.PedroConstants.LIFT_SLIDE_LEFT_MOTOR;
import static org.firstinspires.ftc.teamcode.PedroConstants.LIFT_SLIDE_RIGHT_MOTOR;
import com.acmerobotics.dashboard.FtcDashboard;
import com.acmerobotics.dashboard.config.Config;
import com.acmerobotics.dashboard.telemetry.TelemetryPacket;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.hardware.DcMotorEx;
import com.qualcomm.robotcore.util.ElapsedTime;
@Config
@Autonomous(name = "Lift Motor Subsystem - PID Test")
public class DualMotorSliderTest extends LinearOpMode {

    private DcMotorEx liftSlideLeft;
    private DcMotorEx liftSlideRight;

    // PID gains, live-tunable from FTC Dashboard (@Config exposes public
    // static fields; they must stay non-final to be editable).
    public static double kp = 0.002, ki = 0, kd = 0;

    // PID state carried between loop iterations.
    private double lastError = 0;
    private double integralSum = 0;

    // Target in encoder ticks, live-tunable from the dashboard.
    public static int targetPosition = 0;

    private final FtcDashboard dashboard = FtcDashboard.getInstance();

    // Measures the time step (dt) between PID updates.
    private final ElapsedTime timer = new ElapsedTime();

    @Override
    public void runOpMode() throws InterruptedException {
        TelemetryPacket packet = new TelemetryPacket();
        dashboard.setTelemetryTransmissionInterval(25);

        liftSlideLeft = hardwareMap.get(DcMotorEx.class, LIFT_SLIDE_LEFT_MOTOR);
        liftSlideRight = hardwareMap.get(DcMotorEx.class, LIFT_SLIDE_RIGHT_MOTOR);

        // Zero both encoders, brake at zero power, then run open-loop so the
        // PID below (not the built-in velocity controller) sets motor power.
        liftSlideLeft.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
        liftSlideLeft.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);
        liftSlideLeft.setMode(DcMotor.RunMode.RUN_WITHOUT_ENCODER);
        liftSlideRight.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
        liftSlideRight.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);
        liftSlideRight.setMode(DcMotor.RunMode.RUN_WITHOUT_ENCODER);

        waitForStart();

        // FIX: reset the dt timer here. ElapsedTime starts counting at
        // construction, so without this reset the first loop iteration's dt
        // would include all of init + the wait for START, producing a large
        // spurious integral step and a skewed derivative on the first update.
        timer.reset();

        while (opModeIsActive()) {
            // Left encoder is the feedback source; both motors get the same power.
            double power = calculatePower(targetPosition, liftSlideLeft.getCurrentPosition());

            packet.put("Power", power);
            packet.put("Position", liftSlideLeft.getCurrentPosition());
            packet.put("Error", lastError);
            packet.put("Seconds", timer.seconds());

            liftSlideLeft.setPower(power);
            liftSlideRight.setPower(power);
            dashboard.sendTelemetryPacket(packet);
        }
    }

    /**
     * One PID update step.
     *
     * @param targetPosition  desired encoder position (the reference)
     * @param currentPosition measured encoder position (the state)
     * @return motor power from the PID terms (not clamped; gains keep it small)
     */
    private double calculatePower(int targetPosition, int currentPosition) {
        double error = targetPosition - currentPosition;

        // FIX: sample dt once and reset immediately, so the integral and
        // derivative terms use the same time step; guard against a zero dt
        // (two calls within the clock's resolution) to avoid division by zero.
        double dt = timer.seconds();
        timer.reset();
        if (dt <= 0) {
            dt = 1e-6;
        }

        integralSum += error * dt;
        double derivative = (error - lastError) / dt;
        lastError = error;

        return (error * kp) + (derivative * kd) + (integralSum * ki);
    }
}

View File

@ -0,0 +1,92 @@
/* Copyright (c) 2021 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.teamcode.cometbots.tests;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.Gamepad;
import com.qualcomm.robotcore.util.ElapsedTime;
import org.firstinspires.ftc.teamcode.subsystems.WristSubsystem;
@TeleOp(name = "Wrist Test v2", group = "Debug")
public class WristTest extends LinearOpMode {

    /** Elapsed time since START, shown in telemetry. */
    private final ElapsedTime runtime = new ElapsedTime();

    /**
     * Debug OpMode for exercising the wrist subsystem.
     * Controls: square -> bucket position, cross -> toggle state,
     * bumpers -> nudge position by +/- 0.05.
     */
    @Override
    public void runOpMode() {

        // Wrist under test.
        WristSubsystem wrist = new WristSubsystem(hardwareMap);

        // Gamepad snapshots used for rising-edge detection, so a held
        // button triggers an action exactly once.
        Gamepad gamepadNow = new Gamepad();
        Gamepad gamepadPrev = new Gamepad();

        wrist.initTeleOp();
        waitForStart();
        runtime.reset();

        // Poll the gamepad until the driver presses STOP.
        while (opModeIsActive()) {
            gamepadPrev.copy(gamepadNow);
            gamepadNow.copy(gamepad1);

            if (gamepadNow.square && !gamepadPrev.square) {
                wrist.toBucketPosition();
            }
            if (gamepadNow.cross && !gamepadPrev.cross) {
                wrist.switchState();
            }
            if (gamepadNow.left_bumper && !gamepadPrev.left_bumper) {
                wrist.setPosition(wrist.getPosition() - 0.05);
            }
            if (gamepadNow.right_bumper && !gamepadPrev.right_bumper) {
                wrist.setPosition(wrist.getPosition() + 0.05);
            }

            // Show the elapsed game time and wrist state.
            telemetry.addData("Status", "Run Time: " + runtime.toString());
            telemetry.addData("Wrist State", wrist.getState());
            telemetry.addData("Wrist Position", wrist.getPosition());
            telemetry.update();
        }
    }
}

View File

@ -0,0 +1,59 @@
package org.firstinspires.ftc.teamcode.configs;
import com.acmerobotics.dashboard.config.Config;
@Config
public class RobotConstants {
    // NOTE(review): @Config exposes public static fields to FTC Dashboard for
    // live tuning, but FINAL fields cannot be edited at runtime — confirm
    // whether live tuning is intended; if so these should be non-final.

    // --- Claw servo positions ---
    public final static double clawClose = 0.95;
    public final static double clawOpen = 0.35;
    public final static int autoClawOpen = 0;

    // --- Arm servo positions ---
    public final static double armFloor = 0.7;
    public final static double armSubmarine = 0.55;
    public final static double armReverseBucket = 0.08;
    public final static double armPark = 0.33;

    // --- Skyhook / specimen-hang positions ---
    // value for grabbing the hook specimen
    public final static double grabBlueberry = 0.56;
    public final static double armGrabBlueberrySkyhook = 0.045;
    public final static double wristGrabBlueberrySkyhook = 0.08;
    public final static double armHangBlueberrySkyhook = 0.18;
    public final static double wristHangBlueberrySkyhook = 0;
    public final static int slideHangBlueberrySkyhook = 500;

    // --- Slide encoder targets (ticks) ---
    public final static int slideBelowFloor = -150;
    public final static int slideSpecimanHang = 900;
    public final static int slideSpecimanRelease = 200;
    public final static int backwardBucketDrop = 4670;

    // --- More arm servo positions ---
    public final static double armBucket = 0.45;
    public final static double armSpecimen = 0.155;
    public final static double armInit = 0.13;

    // --- Wrist servo positions ---
    public final static double wristInit = 0;
    public final static double wristPickup = 0.425;
    public final static double wristBucket = 0.56;
    public final static double wristSpecimenPrep = 0.63;
    public final static double wristSpecimenHang = 0.53;
    public final static double wristFloor = 0.75;
    public final static double wristBackwardBucket = 0.725;
    public final static double wristRung = 0.55;
    public final static double wristSpeciemen = 0.1;

    // --- Lift targets (encoder ticks) and power ---
    public final static int toBar = -7000;
    public final static double wristtravel = 0.525;
    public final static int toFloor = 0;
    public final static int liftToFloorPos = 350;
    public final static int liftToSubmarinePos = 350;
    public final static int liftToLowBucketPos = 1750;
    public final static int liftToHighRung = 2100;
    public final static int dropToHighRung = 1675;
    public final static int liftToHighRungAttach = 1050;
    public final static int liftToHighBucketPos = 4500;
    public final static double liftPower = 1;
}

View File

@ -1,24 +0,0 @@
package org.firstinspires.ftc.teamcode.messages;
import com.acmerobotics.roadrunner.PoseVelocity2dDual;
import com.acmerobotics.roadrunner.Time;
/**
 * Log record of a single commanded drive velocity, flattened from a
 * RoadRunner dual pose-velocity into plain doubles plus a capture timestamp.
 */
public final class DriveCommandMessage {
    public long timestamp;              // capture time from System.nanoTime()
    public double forwardVelocity;
    public double forwardAcceleration;
    public double lateralVelocity;
    public double lateralAcceleration;
    public double angularVelocity;
    public double angularAcceleration;

    /**
     * @param poseVelocity dual pose velocity; index 0 of each dual component
     *                     is the velocity, index 1 its derivative (acceleration)
     */
    public DriveCommandMessage(PoseVelocity2dDual<Time> poseVelocity) {
        timestamp = System.nanoTime();
        forwardVelocity = poseVelocity.linearVel.x.get(0);
        forwardAcceleration = poseVelocity.linearVel.x.get(1);
        lateralVelocity = poseVelocity.linearVel.y.get(0);
        lateralAcceleration = poseVelocity.linearVel.y.get(1);
        angularVelocity = poseVelocity.angVel.get(0);
        angularAcceleration = poseVelocity.angVel.get(1);
    }
}

View File

@ -1,19 +0,0 @@
package org.firstinspires.ftc.teamcode.messages;
/**
 * Log record of one set of mecanum wheel power commands together with the
 * battery voltage and a capture timestamp.
 */
public final class MecanumCommandMessage {
    public long timestamp;       // capture time from System.nanoTime()
    public double voltage;       // battery voltage at command time
    public double leftFrontPower;
    public double leftBackPower;
    public double rightBackPower;
    public double rightFrontPower;

    /** Snapshots the given command values and stamps the current time. */
    public MecanumCommandMessage(double voltage, double leftFrontPower, double leftBackPower, double rightBackPower, double rightFrontPower) {
        this.timestamp = System.nanoTime();
        this.voltage = voltage;
        this.leftFrontPower = leftFrontPower;
        this.leftBackPower = leftBackPower;
        this.rightBackPower = rightBackPower;
        this.rightFrontPower = rightFrontPower;
    }
}

View File

@ -1,30 +0,0 @@
package org.firstinspires.ftc.teamcode.messages;
import com.acmerobotics.roadrunner.ftc.PositionVelocityPair;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.robotcore.external.navigation.YawPitchRollAngles;
/**
 * Log record of the raw inputs to the mecanum drive-encoder localizer:
 * the four wheel position/velocity pairs and the IMU orientation in radians.
 */
public final class MecanumLocalizerInputsMessage {
    public long timestamp;               // capture time from System.nanoTime()
    public PositionVelocityPair leftFront;
    public PositionVelocityPair leftBack;
    public PositionVelocityPair rightBack;
    public PositionVelocityPair rightFront;
    public double yaw;                   // radians
    public double pitch;                 // radians
    public double roll;                  // radians

    /** Snapshots the wheel readings and IMU angles, stamping the current time. */
    public MecanumLocalizerInputsMessage(PositionVelocityPair leftFront, PositionVelocityPair leftBack, PositionVelocityPair rightBack, PositionVelocityPair rightFront, YawPitchRollAngles angles) {
        this.timestamp = System.nanoTime();
        this.leftFront = leftFront;
        this.leftBack = leftBack;
        this.rightBack = rightBack;
        this.rightFront = rightFront;
        // Orientation converted to radians at capture time.
        this.yaw = angles.getYaw(AngleUnit.RADIANS);
        this.pitch = angles.getPitch(AngleUnit.RADIANS);
        this.roll = angles.getRoll(AngleUnit.RADIANS);
    }
}

View File

@ -1,18 +0,0 @@
package org.firstinspires.ftc.teamcode.messages;
import com.acmerobotics.roadrunner.Pose2d;
/**
 * Log record of a robot pose (position and heading) flattened to plain
 * doubles with a capture timestamp.
 */
public final class PoseMessage {
    public long timestamp;   // capture time from System.nanoTime()
    public double x;
    public double y;
    public double heading;   // radians

    /** Snapshots the given pose and stamps the current time. */
    public PoseMessage(Pose2d pose) {
        timestamp = System.nanoTime();
        x = pose.position.x;
        y = pose.position.y;
        heading = pose.heading.toDouble();
    }
}

View File

@ -1,15 +0,0 @@
package org.firstinspires.ftc.teamcode.messages;
/**
 * Log record of one set of tank-drive power commands together with the
 * battery voltage and a capture timestamp.
 */
public final class TankCommandMessage {
    public long timestamp;   // capture time from System.nanoTime()
    public double voltage;   // battery voltage at command time
    public double leftPower;
    public double rightPower;

    /** Snapshots the given command values and stamps the current time. */
    public TankCommandMessage(double voltage, double leftPower, double rightPower) {
        this.timestamp = System.nanoTime();
        this.voltage = voltage;
        this.leftPower = leftPower;
        this.rightPower = rightPower;
    }
}

View File

@ -1,17 +0,0 @@
package org.firstinspires.ftc.teamcode.messages;
import com.acmerobotics.roadrunner.ftc.PositionVelocityPair;
import java.util.List;
/**
 * Log record of the raw inputs to the tank-drive localizer: the left- and
 * right-side encoder position/velocity pairs, copied into arrays so the
 * record is independent of the caller's lists.
 */
public final class TankLocalizerInputsMessage {
    public long timestamp;                // capture time from System.nanoTime()
    public PositionVelocityPair[] left;
    public PositionVelocityPair[] right;

    /** Copies both side readings into arrays and stamps the current time. */
    public TankLocalizerInputsMessage(List<PositionVelocityPair> left, List<PositionVelocityPair> right) {
        timestamp = System.nanoTime();
        this.left = left.toArray(new PositionVelocityPair[0]);
        this.right = right.toArray(new PositionVelocityPair[0]);
    }
}

View File

@ -1,17 +0,0 @@
package org.firstinspires.ftc.teamcode.messages;
import com.acmerobotics.roadrunner.ftc.PositionVelocityPair;
/**
 * Log record of the raw inputs to the three-dead-wheel localizer: two
 * parallel wheels and one perpendicular wheel, plus a capture timestamp.
 */
public final class ThreeDeadWheelInputsMessage {
    public long timestamp;            // capture time from System.nanoTime()
    public PositionVelocityPair par0; // first parallel wheel
    public PositionVelocityPair par1; // second parallel wheel
    public PositionVelocityPair perp; // perpendicular (strafe) wheel

    /** Snapshots the three wheel readings and stamps the current time. */
    public ThreeDeadWheelInputsMessage(PositionVelocityPair par0, PositionVelocityPair par1, PositionVelocityPair perp) {
        this.timestamp = System.nanoTime();
        this.par0 = par0;
        this.par1 = par1;
        this.perp = perp;
    }
}

View File

@ -1,35 +0,0 @@
package org.firstinspires.ftc.teamcode.messages;
import com.acmerobotics.roadrunner.ftc.PositionVelocityPair;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.robotcore.external.navigation.AngularVelocity;
import org.firstinspires.ftc.robotcore.external.navigation.YawPitchRollAngles;
/**
 * Log record of the raw inputs to the two-dead-wheel localizer: one parallel
 * and one perpendicular wheel, the IMU orientation, and its angular rates.
 */
public final class TwoDeadWheelInputsMessage {
    public long timestamp;            // capture time from System.nanoTime()
    public PositionVelocityPair par;  // parallel wheel
    public PositionVelocityPair perp; // perpendicular (strafe) wheel
    public double yaw;                // radians
    public double pitch;              // radians
    public double roll;               // radians
    public double xRotationRate;
    public double yRotationRate;
    public double zRotationRate;

    /** Snapshots wheel, orientation, and rate readings, stamping the current time. */
    public TwoDeadWheelInputsMessage(PositionVelocityPair par, PositionVelocityPair perp, YawPitchRollAngles angles, AngularVelocity angularVelocity) {
        this.timestamp = System.nanoTime();
        this.par = par;
        this.perp = perp;
        // Orientation converted to radians at capture time.
        this.yaw = angles.getYaw(AngleUnit.RADIANS);
        this.pitch = angles.getPitch(AngleUnit.RADIANS);
        this.roll = angles.getRoll(AngleUnit.RADIANS);
        // Raw angular rates straight from the IMU sample.
        this.xRotationRate = angularVelocity.xRotationRate;
        this.yRotationRate = angularVelocity.yRotationRate;
        this.zRotationRate = angularVelocity.zRotationRate;
    }
}

View File

@ -0,0 +1,278 @@
## Overview
This is the localization system developed for the Pedro Pathing path follower. These localizers use
the pose exponential method of localization. It's basically a way of turning movements from the
robot's coordinate frame to the global coordinate frame. If you're interested in reading more about
it, then check out pages 177 - 183 of [Controls Engineering in the FIRST Robotics Competition](https://file.tavsys.net/control/controls-engineering-in-frc.pdf)
by Tyler Veness. However, the OTOS localizer uses its own onboard system for calculating localization,
whose internal workings are not documented here.
## Setting Your Localizer
Go to line `73` in the `PoseUpdater` class, and replace the `new ThreeWheelLocalizer(hardwareMap)`
with the localizer that applies to you:
* If you're using drive encoders, put `new DriveEncoderLocalizer(hardwareMap)`
* If you're using two wheel odometry, put `new TwoWheelLocalizer(hardwareMap)`
* If you're using three wheel odometry, put `new ThreeWheelLocalizer(hardwareMap)`, so basically
don't change it from the default
* If you're using three wheel odometry with the IMU, put `new ThreeWheelIMULocalizer(hardwareMap)`
* If you're using OTOS, put `new OTOSLocalizer(hardwareMap)`
* If you're using Pinpoint, put `new PinpointLocalizer(hardwareMap)`
## Tuning
To start, you'll want to select your localizer of choice. Below, I'll have instructions for the drive
encoder localizer, two tracking wheel localizer, the three tracking wheel localizer, the three
wheel with IMU localizer, and the OTOS localizer offered in Pedro Pathing. Scroll down to the section
that applies to you and follow the directions there.
# Drive Encoders
* First, you'll need all of your drive motors to have encoders attached.
* Then, go to `DriveEncoderLocalizer.java`. The motor names are already set, so you don't have to do
anything to change the encoder names there.
* Then, reverse the direction of any encoders so that all encoders tick up when the robot is moving forward.
* Now, you'll have to tune the multipliers. These convert your measurements from encoder ticks into
inches or radians, essentially scaling your localizer so that your numbers are accurate to the real
world.
* First, start with the `Turn Localizer Tuner`. You'll want to position your robot to be facing
in a direction you can easily find again, like lining up an edge of the robot against a field tile edge.
By default, you should spin the robot for one rotation going counterclockwise. Once you've spun
exactly that one rotation, or whatever you set that value to, then the turn multiplier will be shown
as the second number shown. The first number is how far the robot thinks you've spun, and the second
number is the multiplier you need to have to scale your current readings to your goal of one rotation,
or the custom set angle. Feel free to run a few more tests and average the results. Once you have
this multiplier, then replace `TURN_TICKS_TO_RADIANS` in the localizer with your multiplier. Make sure
you replace the number, not add on or multiply it to the previous number. The tuner takes into
account your current multiplier.
* Next, go on to `Forward Localizer Tuner`. You'll want to position a ruler alongside your robot.
By default, you'll want to push the robot 30 inches forward. Once you've pushed that far, or whatever
you set that value to, then the forward multiplier will be shown as the second number shown. The
first number is how far the robot thinks you've gone, and the second number is the multiplier you
need to have to scale your current readings to your goal of 30 inches, or the custom set distance.
Feel free to run a few more tests and average the results. Once you have this multiplier, then
replace `FORWARD_TICKS_TO_INCHES` in the localizer with your multiplier. Make sure you replace the number,
not add on or multiply it to the previous number. The tuner takes into account your current multiplier.
* Finally, go to `Lateral Localizer Tuner`. You'll want to position a ruler alongside your robot.
By default, you'll want to push the robot 30 inches to the right. Once you've pushed that far, or whatever
you set that value to, then the lateral multiplier will be shown as the second number shown. The
first number is how far the robot thinks you've gone, and the second number is the multiplier you
need to have to scale your current readings to your goal of 30 inches, or the custom set distance.
Feel free to run a few more tests and average the results. Once you have this multiplier, then
replace `STRAFE_TICKS_TO_INCHES` in the localizer with your multiplier. Make sure you replace the number,
not add on or multiply it to the previous number. The tuner takes into account your current multiplier.
* Once you're done with all this, your localizer should be tuned. To test it out, you can go to
`Localization Test` and push around or drive around your robot. Go to [FTC Dashboard](http://192.168.43.1:8080/dash)
and on the top right, switch the drop down from the default view to the field view. Then, on the bottom
left corner, you should see a field and the robot being drawn on the field. You can then move your
robot around and see if the movements look accurate on FTC Dashboard. If they don't, then you'll
want to re-run some of the previous steps. Otherwise, congrats on tuning your localizer!
# Two Wheel Localizer
* First, you'll need a Control Hub with a working IMU, and two odometry wheels connected to motor
encoder ports on a hub.
* Then, go to `TwoWheelLocalizer.java`. First, in the constructor, enter in the positions of your
tracking wheels relative to the center of the wheels of the robot. The positions are in inches, so
convert measurements accordingly. Use the comment above the class declaration to help you with the
coordinates.
* Next, go to where it tells you to replace the current statements with your encoder ports in the constructor.
Replace the `deviceName` parameter with the name of the port that the encoder is connected to. The
variable names correspond to which tracking wheel should be connected.
* After that, go to the instantiation of the IMU and change the orientation of the IMU to match that
of your robot's.
* Then, reverse the direction of any encoders so that the forward encoder ticks up when the robot
is moving forward and the strafe encoder ticks up when the robot moves right.
* Now, you'll have to tune the multipliers. These convert your measurements from encoder ticks into
inches or radians, essentially scaling your localizer so that your numbers are accurate to the real
world.
* You actually won't need the turning tuner for this one, since the IMU in the Control Hub will take
care of the heading readings.
* First, start with the `Forward Localizer Tuner`. You'll want to position a ruler alongside your robot.
By default, you'll want to push the robot 30 inches forward. Once you've pushed that far, or whatever
you set that value to, then the forward multiplier will be shown as the second number shown. The
first number is how far the robot thinks you've gone, and the second number is the multiplier you
need to have to scale your current readings to your goal of 30 inches, or the custom set distance.
Feel free to run a few more tests and average the results. Once you have this multiplier, then
replace `FORWARD_TICKS_TO_INCHES` in the localizer with your multiplier. Make sure you replace the number,
not add on or multiply it to the previous number. The tuner takes into account your current multiplier.
* Finally, go to `Lateral Localizer Tuner`. You'll want to position a ruler alongside your robot.
By default, you'll want to push the robot 30 inches to the right. Once you've pushed that far, or whatever
you set that value to, then the lateral multiplier will be shown as the second number shown. The
first number is how far the robot thinks you've gone, and the second number is the multiplier you
need to have to scale your current readings to your goal of 30 inches, or the custom set distance.
Feel free to run a few more tests and average the results. Once you have this multiplier, then
replace `STRAFE_TICKS_TO_INCHES` in the localizer with your multiplier. Make sure you replace the number,
not add on or multiply it to the previous number. The tuner takes into account your current multiplier.
* Once you're done with all this, your localizer should be tuned. To test it out, you can go to
`Localization Test` and push around or drive around your robot. Go to [FTC Dashboard](http://192.168.43.1:8080/dash)
and on the top right, switch the drop down from the default view to the field view. Then, on the bottom
left corner, you should see a field and the robot being drawn on the field. You can then move your
robot around and see if the movements look accurate on FTC Dashboard. If they don't, then you'll
want to re-run some of the previous steps. Otherwise, congrats on tuning your localizer!
# Three Wheel Localizer
* First, you'll need three odometry wheels connected to motor encoder ports on a hub.
* Then, go to `ThreeWheelLocalizer.java`. First, in the constructor, enter in the positions of your
tracking wheels relative to the center of the wheels of the robot. The positions are in inches, so
convert measurements accordingly. Use the comment above the class declaration to help you with the
coordinates.
* Next, go to where it tells you to replace the current statements with your encoder ports in the constructor.
Replace the `deviceName` parameter with the name of the port that the encoder is connected to. The
variable names correspond to which tracking wheel should be connected.
* Then, reverse the direction of any encoders so that the forward encoders tick up when the robot
is moving forward and the strafe encoder ticks up when the robot moves right.
* First, start with the `Turn Localizer Tuner`. You'll want to position your robot to be facing
in a direction you can easily find again, like lining up an edge of the robot against a field tile edge.
By default, you should spin the robot for one rotation going counterclockwise. Once you've spun
exactly that one rotation, or whatever you set that value to, then the turn multiplier will be shown
as the second number shown. The first number is how far the robot thinks you've spun, and the second
number is the multiplier you need to have to scale your current readings to your goal of one rotation,
or the custom set angle. Feel free to run a few more tests and average the results. Once you have
this multiplier, then replace `TURN_TICKS_TO_RADIANS` in the localizer with your multiplier. Make sure
you replace the number, not add on or multiply it to the previous number. The tuner takes into
account your current multiplier.
* Next, go on to `Forward Localizer Tuner`. You'll want to position a ruler alongside your robot.
By default, you'll want to push the robot 30 inches forward. Once you've pushed that far, or whatever
you set that value to, then the forward multiplier will be shown as the second number shown. The
first number is how far the robot thinks you've gone, and the second number is the multiplier you
need to have to scale your current readings to your goal of 30 inches, or the custom set distance.
Feel free to run a few more tests and average the results. Once you have this multiplier, then
replace `FORWARD_TICKS_TO_INCHES` in the localizer with your multiplier. Make sure you replace the number,
not add on or multiply it to the previous number. The tuner takes into account your current multiplier.
* Finally, go to `Lateral Localizer Tuner`. You'll want to position a ruler alongside your robot.
By default, you'll want to push the robot 30 inches to the right. Once you've pushed that far, or whatever
you set that value to, then the lateral multiplier will be shown as the second number shown. The
first number is how far the robot thinks you've gone, and the second number is the multiplier you
need to have to scale your current readings to your goal of 30 inches, or the custom set distance.
Feel free to run a few more tests and average the results. Once you have this multiplier, then
replace `STRAFE_TICKS_TO_INCHES` in the localizer with your multiplier. Make sure you replace the number,
not add on or multiply it to the previous number. The tuner takes into account your current multiplier.
* Once you're done with all this, your localizer should be tuned. To test it out, you can go to
`Localization Test` and push around or drive around your robot. Go to [FTC Dashboard](http://192.168.43.1:8080/dash)
and on the top right, switch the drop down from the default view to the field view. Then, on the bottom
left corner, you should see a field and the robot being drawn on the field. You can then move your
robot around and see if the movements look accurate on FTC Dashboard. If they don't, then you'll
want to re-run some of the previous steps. Otherwise, congrats on tuning your localizer!
# Three Wheel Localizer with IMU
* First, you'll need three odometry wheels connected to motor encoder ports on a hub.
* Then, go to `ThreeWheelIMULocalizer.java`. First, in the constructor, enter in the positions of your
tracking wheels relative to the center of the wheels of the robot. The positions are in inches, so
convert measurements accordingly. Use the comment above the class declaration to help you with the
coordinates.
* Next, go to where it tells you to replace the current statements with your encoder ports in the constructor.
Replace the `deviceName` parameter with the name of the port that the encoder is connected to. The
variable names correspond to which tracking wheel should be connected.
* After that, go to the instantiation of the IMU and change the orientation of the IMU to match that
of your robot's.
* Then, reverse the direction of any encoders so that the forward encoders tick up when the robot
is moving forward and the strafe encoder ticks up when the robot moves right.
* Although heading localization is done mostly through the IMU, the tracking wheels are still used for
small angle adjustments for better stability. So, you will still need to tune your turning multiplier.
* First, start with the `Turn Localizer Tuner`. Before doing any tuning, go to FTC Dashboard and find
the `ThreeWheelIMULocalizer` dropdown and deselect `useIMU`. You'll want to position your robot to be facing
in a direction you can easily find again, like lining up an edge of the robot against a field tile edge.
By default, you should spin the robot for one rotation going counterclockwise. Once you've spun
exactly that one rotation, or whatever you set that value to, then the turn multiplier will be shown
as the second number shown. The first number is how far the robot thinks you've spun, and the second
number is the multiplier you need to have to scale your current readings to your goal of one rotation,
or the custom set angle. Feel free to run a few more tests and average the results. Once you have
this multiplier, then replace `TURN_TICKS_TO_RADIANS` in the localizer with your multiplier. Make sure
you replace the number, not add on or multiply it to the previous number. The tuner takes into
account your current multiplier.
* Next, go on to `Forward Localizer Tuner`. You should re-enable `useIMU` at this time. You'll want to position a ruler alongside your robot.
By default, you'll want to push the robot 30 inches forward. Once you've pushed that far, or whatever
you set that value to, then the forward multiplier will be shown as the second number shown. The
first number is how far the robot thinks you've gone, and the second number is the multiplier you
need to have to scale your current readings to your goal of 30 inches, or the custom set distance.
Feel free to run a few more tests and average the results. Once you have this multiplier, then
replace `FORWARD_TICKS_TO_INCHES` in the localizer with your multiplier. Make sure you replace the number,
not add on or multiply it to the previous number. The tuner takes into account your current multiplier.
* Finally, go to `Lateral Localizer Tuner`. `useIMU` should be enabled for this step. You'll want to position a ruler alongside your robot.
By default, you'll want to push the robot 30 inches to the right. Once you've pushed that far, or whatever
you set that value to, then the lateral multiplier will be shown as the second number shown. The
first number is how far the robot thinks you've gone, and the second number is the multiplier you
need to have to scale your current readings to your goal of 30 inches, or the custom set distance.
Feel free to run a few more tests and average the results. Once you have this multiplier, then
replace `STRAFE_TICKS_TO_INCHES` in the localizer with your multiplier. Make sure you replace the number,
not add on or multiply it to the previous number. The tuner takes into account your current multiplier.
* Once you're done with all this, your localizer should be tuned. Make sure that `useIMU` is turned back on. To test it out, you can go to
`Localization Test` and push around or drive around your robot. Go to [FTC Dashboard](http://192.168.43.1:8080/dash)
and on the top right, switch the drop down from the default view to the field view. Then, on the bottom
left corner, you should see a field and the robot being drawn on the field. You can then move your
robot around and see if the movements look accurate on FTC Dashboard. If they don't, then you'll
want to re-run some of the previous steps. Otherwise, congrats on tuning your localizer!
# OTOS Localizer
* First, you'll need the OTOS connected to an I2C port on a hub. Make sure the film on the sensor is removed.
* Then, go to `OTOSLocalizer.java`. First, in the constructor, go to where it tells you to replace
the current statement with your OTOS port in the constructor. Replace the `deviceName` parameter
with the name of the port that the OTOS is connected to.
* Next, enter in the position of your OTOS relative to the center of the wheels of the robot. The
positions are in inches, so convert measurements accordingly. Use the comment above the class
declaration to help you with the coordinates.
* First, start with the `Turn Localizer Tuner`. You'll want to position your robot to be facing
in a direction you can easily find again, like lining up an edge of the robot against a field tile edge.
By default, you should spin the robot for one rotation going counterclockwise. Once you've spun
exactly that one rotation, or whatever you set that value to, then the angular scalar will be shown
as the second number shown. The first number is how far the robot thinks you've spun, and the second
number is the scalar you need to have to scale your current readings to your goal of one rotation,
or the custom set angle. Feel free to run a few more tests and average the results. Once you have
this scalar, then replace the angular scalar on line `78` in the localizer with your scalar.
Make sure you replace the number, not add on or multiply it to the previous number. The tuner takes into
account your current angular scalar.
* For this next step, since OTOS only has one linear scalar, you can run either the forward or lateral
localizer tuner and the result should be the same. So, you choose which one you want to run.
* Option 1: go on to `Forward Localizer Tuner`. You'll want to position a ruler alongside your robot.
By default, you'll want to push the robot 30 inches forward. Once you've pushed that far, or whatever
you set that value to, then the linear scalar will be shown as the second number shown. The
first number is how far the robot thinks you've gone, and the second number is the scalar you
need to have to scale your current readings to your goal of 30 inches, or the custom set distance.
Feel free to run a few more tests and average the results. Once you have this scalar, then
replace the linear scalar on line `77` in the localizer with your scalar. Make sure you replace the number,
not add on or multiply it to the previous number. The tuner takes into account your current scalar.
* Option 2: go to `Lateral Localizer Tuner`. You'll want to position a ruler alongside your robot.
By default, you'll want to push the robot 30 inches to the right. Once you've pushed that far, or whatever
you set that value to, then the linear scalar will be shown as the second number shown. The
first number is how far the robot thinks you've gone, and the second number is the scalar you
need to have to scale your current readings to your goal of 30 inches, or the custom set distance.
Feel free to run a few more tests and average the results. Once you have this scalar, then
replace the linear scalar on line `77` in the localizer with your scalar. Make sure you replace the number,
not add on or multiply it to the previous number. The tuner takes into account your current scalar.
* Once you're done with all this, your localizer should be tuned. To test it out, you can go to
`Localization Test` and push around or drive around your robot. Go to [FTC Dashboard](http://192.168.43.1:8080/dash)
and on the top right, switch the drop down from the default view to the field view. Then, on the bottom
left corner, you should see a field and the robot being drawn on the field. You can then move your
robot around and see if the movements look accurate on FTC Dashboard. If they don't, then you'll
want to re-run some of the previous steps. Otherwise, congrats on tuning your localizer!
## Pinpoint Localizer
* First, you will need to plug the Pinpoint into an I2C port. Ensure that the dead wheel encoder wires are
plugged into the proper ports on the pinpoint to ensure no error within the tuning steps.
* Then, go to the `PinpointLocalizer.java` file and go to where it tells you to replace
the current statement with your pinpoint port in the constructor. Replace the `deviceName` parameter
with the name of the port that the pinpoint is connected to.
* Next, follow the instructions left by the TODO: comment and enter in the odometry measurements either in
millimeters or inches (we have the conversion rates listed).
* First, to ensure that your pinpoint is properly connected, please run the `SensorGoBildaPinpointExample.java`
file left in the `tuning` folder located within `localization`.
* Once completed, the localizer should be properly tuned. To test it out, you can go to
`Localization Test` and push around or drive around your robot. Go to [FTC Dashboard](http://192.168.43.1:8080/dash)
and on the top right, switch the drop down from the default view to the field view. Then, on the bottom
left corner, you should see a field and the robot being drawn on the field. You can then move your
robot around and see if the movements look accurate on FTC Dashboard. If they don't, then you'll
want to re-run some of the previous steps. Otherwise, congrats on tuning your localizer!
## Using Road Runner's Localizer
Of course, many teams have experience using Road Runner in the past and so have localizers from Road
Runner that are tuned. There is an adapter for the Road Runner three wheel localizer to the Pedro
Pathing localization system in Pedro Pathing, but it is commented out by default to reduce the number
of imports in gradle.
To re-enable it, go to `RoadRunnerEncoder.java`, `RoadRunnerThreeWheelLocalizer.java`, and `RRToPedroThreeWheelLocalizer.java`
and hit `ctrl` + `a` to select everything within the files. Then, press `ctrl` + `/` to uncomment the code.
Afterwards, go to `build.gradle` file under the `teamcode` folder and add the following dependencies:
```
implementation 'org.apache.commons:commons-math3:3.6.1'
implementation 'com.acmerobotics.com.roadrunner:core:0.5.6'
```
After that, you should be good to go. If you want to use a different localizer from Road Runner, then
you can adapt it in the same process that's used for the Road Runner three wheel localizer.

View File

@ -0,0 +1,109 @@
## Basic Ideas
Pedro Pathing is a reactive vector based follower. What this means is that the robot dynamically
calculates a set of vectors that are required to correct error as well as to move forward and applies them.
The robot calculates:
* centripetal force correction
* translational correction
* heading correction
* drive vector
These are then applied to the robot in this order until either the robot's power is maxed out or all
the vectors are applied.
## Why Pedro Pathing?
Why use Pedro Pathing? Why not something else like Road Runner or Pure Pursuit?
* Why not Pure Pursuit?
* Pure Pursuit searches for the farthest point on the path that's within a certain radius from the robot. Pure Pursuit will then go in a straight line to that point. This poses several problems, as a small search radius will cause some oscillations on corners, and a large search radius will cut corners on paths, which makes the paths inaccurate to real life.
* Pedro Pathing instead corrects to the closest point on the path while still following the path. This ensures that the follower will stay on the path while still being able to move forward along the path without cutting corners or encountering oscillation issues.
* Why not Road Runner?
* Road Runner is a motion profile based follower, which means that a set of instructions for motor powers are calculated for each path beforehand and then run. During this motion profile, Road Runner can struggle to correct. This can be sufficient for many situations, but if the robot encounters an obstacle or wheel slippage, it may be unable to correct in time.
* Pedro Pathing instead dynamically corrects throughout the path. The movement vectors are calculated at every point along the path, and because of this, the path can even be changed midway through and Pedro Pathing will still be able to correct. Since correction occurs throughout the path, the error correction isn't concentrated on the end of the path and therefore the robot is able to better minimize error.
## How Does Pedro Path?
As mentioned in the *Basic Ideas* section, Pedro Pathing calculates a set of vectors to move the
robot along a path, which is defined with Bezier curves. Here, we'll go more in-depth on how these
vectors are calculated and used in path following.
### The Hierarchy
While following paths, sometimes all these motion vectors are demanding more power from the robot
than it actually has. How do we deal with this?
Our motion vectors are applied in order of importance, which is the order they appear in within the
list in *Basic Ideas*. The centripetal force vector is the highest importance, since it ensures the
robot sticks to the path. If the robot is far off the path, then the robot will not drive along the
path, and so the centripetal force vector will be reduced in magnitude. This is why ranking the
centripetal force correction above the translational correction doesn't produce issues. The next
highest, of course, is the translational correction. This corrects strictly the robot's position to
the closest point on the path. The reasoning behind this is that it is usually much more important
that the robot be on the path, and so avoid potential obstacles, rather than facing the correct
direction. The third highest important vector is the heading correction, which turns the robot to
the correct angle. This is higher than the drive vector since we don't want to drive forward if the
robot isn't facing the correct direction. Finally, the drive vector is applied. This ensures that
the robot only continues on the path when there aren't any major issues with the following.
As each vector is applied, the robot checks to see if the sum of the applied vectors is greater than
the power that the drivetrain can produce, which would be 1 motor power. If the magnitude of the
combined vectors is greater than 1, then the most recently added vector is scaled down until the
combined vectors' magnitude is equal to 1. If all vectors are able to be applied without exceeding
the power limit, then all the vectors can just be applied without issue.
### Centripetal Force Correction
Have you ever noticed that your robot seems to want to swing outwards when taking corners? This is
due to a lack of centripetal force correction. In order to take curves effectively, your robot must
accelerate towards the inside of the curve. If we can approximate the region of the path the robot
is at with a circle, then we can use the formula for centripetal force to calculate how much power
we need to allocate to approximate a centripetal force.
Because paths are defined with Bezier curves, we can easily take the first and second derivative of
the path, expressed as vectors. We can use that to calculate the curvature of the path, which is the
inverse of the length of the radius of the circle we can use to approximate the path. The actual
formula for the calculations of the curvature is the cross product of the first derivative and
second derivative, divided by the magnitude of the first derivative raised to the power of 3.
With this, along with the weight of the robot and the velocity of the robot along the path, we can
calculate the force necessary to keep the robot on the path, and then tune a scaling factor to turn
that force into a corresponding power for the robot.
### Translational Correction
This is as simple as it sounds: this corrects error in the robot's position only. The robot's translational
error is corrected with a PID control. The translational correction does not act along the path the
robot takes, but instead moves the robot back to the closest point on the path.
### Heading Correction
The heading correction operates very similarly to the translational correction, except this corrects
the direction the robot is facing. The heading correction will turn in the closest direction from the
robot's current heading to the target heading.
### Drive Vector
The drive vector points in the direction of the tangent of the path and it is responsible for moving
the robot along the path. Using basic kinematics equations, we can use the velocity of the robot
along the path, the length of path left, and a specified target rate of deceleration to calculate
the velocity we should be moving at. Additionally, after finding out the rate of deceleration of the
robot under 0 power, we can compensate for that with another kinematics equation. Combining these
two lets us control our velocity to both move along the path quickly and brake without overshooting.
## Additional Capabilities
In addition to following paths, Pedro Pathing can be used for a few other purposes.
### Holding Points
Pedro Pathing is also capable of holding a specified position and direction. This can be useful for
improving on the end accuracy of paths, providing additional correction time if possible. It can
also be useful in cases where contact with other robots might occur. For instance, in the 2022-2023
FTC season challenge, Power Play, robots might come into contact when scoring on a contested middle
junction. Pedro Pathing would be able to recover and correct from a robot collision, allowing for
more consistent scoring.
### TeleOp Enhancements
Finally, Pedro Pathing can be used in TeleOp to enhance driving. With regular mecanum drive, robots
will tend to swing out when taking corners. Pedro Pathing can account for that, allowing the robot
to take corners more smoothly and efficiently. Using the same localizer as is used in autonomous, a
first and second derivative can be estimated from previous positions. Then, with a modified version
of the curvature formula, we can estimate a centripetal force correction and apply it under driver
control.
## Questions?
If you still have more questions, feel free to contact us at `scottsbots10158@gmail.com` or
within our Discord linked [here](https://discord.gg/2GfC4qBP5s).

View File

@ -0,0 +1,28 @@
## Welcome!
This is the Pedro Pathing path following program developed by FTC team 10158 Scott's Bots with Logan
Nash in the 2023-2024 Centerstage season.
## Installation
The quickest way to get started is with the quickstart [here](https://github.com/AnyiLin/Pedro-Pathing-Quickstart).
Otherwise, take the `pedroPathing` folder and put it under the `teamcode` folder in your project.
You can do this from either downloading the project from the above quickstart link or the 10158
CENTERSTAGE repository [here](https://github.com/AnyiLin/10158-Centerstage).
For this version of Pedro Pathing, the localizer used is the Road Runner localizer. To install its
dependencies:
1. Find `build.dependencies.gradle` in the main folder of your project.
2. Add the following code to the end of the `repositories` block:
```
maven { url = 'https://maven.brott.dev/' }
```
3. Then, add the following code to the end of your `dependencies` block:
```
implementation 'com.acmerobotics.dashboard:dashboard:0.4.5'
```
4. Find the `build.gradle` file under the `teamcode` folder.
5. In this gradle file, add the following dependency:
```
implementation 'com.fasterxml.jackson.core:jackson-databind:2.12.7'
implementation 'org.jetbrains.kotlin:kotlin-stdlib:1.4.21'
```

View File

@ -0,0 +1,130 @@
## Prerequisites
Obviously, you have to have a robot to use Pedro Pathing. Also, Pedro Pathing is only able to work
with omnidirectional drives, like mecanum drive. There is currently no support for swerve drives.
You must also have a localizer of some sort. Pedro Pathing has a drive encoder, a two tracking wheel,
and a three tracking wheel localizer. You will need to have your localizer tuned before starting to
tune Pedro Pathing. Check out the tuning guide under the localization tab if you're planning on using one of the
localizers available in Pedro Pathing. Additionally, using [FTC Dashboard](http://192.168.43.1:8080/dash)
will help a lot in tuning. Team 16166 Watt'S Up made a path visualizer linked [here](https://pedro-path-generator.vercel.app).
The old Desmos visualizer is [here](https://www.desmos.com/calculator/3so1zx0hcd), but the one by
Watt'S Up is honestly a lot better.
One last thing to note is that Pedro Pathing operates in inches and radians. You can use centimeters
instead of inches, but you'll have to input all your measurements in centimeters, and any distances
that the tuners require you to push the robot or the tuners output will say "inches" when the actual
measurements will be in centimeters.
## Tuning
* First, make sure that your motor names and directions, located at the top of `FollowerConstants`,
are correct.
* After that, we need the mass of the robot in kg. This is used for the centripetal force correction,
and the mass, with the variable name `mass`, should be put on line `92` in the `FollowerConstants`
class under the `tuning` package.
* Next, we need to find the preferred mecanum drive vectors. The rollers on mecanum wheels point at a
45 degree angle from the forward direction, but the actual direction the force is output is actually
closer to forward. Before running any OpModes, make sure your motors are reversed properly in the
`Follower` class constructor. To find the direction your wheels will go, you will need to run the
`Forward Velocity Tuner` and `Strafe Velocity Tuner` OpModes. These will run your robot at full
power for 40 inches forward and to the right, respectively. The distance can be changed through FTC
Dashboard under the dropdown for each respective class, but higher distances work better. After the
distance has finished running, the end velocity will be output to telemetry. The robot may continue
to drift a little bit after the robot has finished running the distance, so make sure you have
plenty of room. Once you're done, put the velocity for the `Forward Velocity Tuner` on line `39` in
the `FollowerConstants` class, and the velocity for the `Strafe Velocity Tuner` on line `40` in the
`FollowerConstants` class. The variable names should be `xMovement` and `yMovement`, respectively.
* The last set of automatic tuners you'll need to run are the zero power acceleration tuners. These
find the rate at which your robot decelerates when power is cut from the drivetrain. This is used to
get a more accurate estimation of the drive vector. To find this, you will need to run the
`Forward Zero Power Acceleration Tuner` and the `Lateral Zero Power Acceleration Tuner` OpModes.
These will run your robot until it hits a velocity of 30 inches/second forward and to the right,
respectively. The velocity can be changed through FTC Dashboard under the dropdown for each
respective class, but higher velocities work better. After the velocity has been reached, power will
be cut from the drivetrain and the robot's deceleration will be tracked until the robot stops, at
which point it will display the deceleration in telemetry. This robot will need to drift to a stop
to properly work, and the higher the velocity the greater the drift distance, so make sure you have
enough room. Once you're done, put the zero power acceleration for the
`Forward Zero Power Acceleration Tuner` on line `100` in the `FollowerConstants` class and the zero
power acceleration for the `Lateral Zero Power Acceleration Tuner` on line `104` in the
`FollowerConstants` class. The variable names should be `forwardZeroPowerAcceleration` and
`lateralZeroPowerAcceleration`, respectively.
* After this, we will want to tune the translational PID. Go to FTC Dashboard and disable all but
the `useTranslational` checkboxes under the `Follower` tab. Then, run `StraightBackAndForth`.
Make sure you disable the timer on autonomous OpModes. You will notice in telemetry a message saying
that the robot will travel a distance forward and backward, this will not happen until later, so for
now you can ignore this message. The robot should not move when you run the OpMode initially. Instead,
it should correct when you push it away from its starting position. Note that this correction should
happen regardless of the robot's rotation, and that the robot should not rotate itself (if it does,
disable `useHeading` as mentioned prior). Also note that the robot will only correct to an imaginary line
that runs straight forward from the robot's starting position, meaning that it will not correct in the
(original) forward direction. The PID for the translational error is called `translationalPIDF`.
If you need to add a feedforward value, use the `translationalPIDFFeedForward` since that will add
the feedforward in the direction the robot is trying to move, rather than the feedforward in the
PIDF itself, since those will only add the feedforward one way. You can change the PIDF constants
and feedforward values, under the `FollowerConstants` tab in FTC Dashboard.
To tune the PID, push the robot off the path and see how it corrects. You will want to alternate the sides
you push to reduce field wear and tear as well as push with varying power and distance. I would
recommend tuning the PID so that it is capable of correcting while minimizing oscillations and still
achieving a satisfactory level of accuracy. Overall, try to tune for fewer oscillations rather than
higher speeds or perfect accuracy, since this will make the robot run more smoothly under actual
pathing conditions.
* Next, we will tune the heading PID. The process is essentially the same as above, except you will
want to only enable `useHeading` under `Follower` on FTC Dashboard, as well as turn the robot from
opposing corners instead of pushing the robot. Naturally, instead of changing the stuff with
"translational" in the name, you will instead want to look for stuff with "heading" in the name.
Otherwise, these two PIDs are functionally very similar. The same tips from above will apply to this.
* Afterwards, we will tune the drive PID. Before we continue, we will need to set the
`zeroPowerAccelerationMultiplier`. This determines how fast your robot will decelerate as a factor
of how fast your robot will coast to a stop. Honestly, this is up to you. I personally used 4, but
what works best for you is most important. Higher numbers will cause a faster brake, but increase
oscillations at the end. Lower numbers will do the opposite. This can be found on line `113` in
`FollowerConstants`, named `zeroPowerAccelerationMultiplier`. The drive PID is much, much more sensitive than the others. For reference,
my P values were in the hundredths and thousandths place values, and my D values were in the hundred
thousandths and millionths place values. To tune this, enable `useDrive`, `useHeading`, and
`useTranslational` in the `Follower` dropdown in FTC Dashboard. Next, run `StraightBackAndForth`
and don't forget to turn off the timer on the OpMode. Then, tune the PID following the tips from
earlier. For this, it is very important to try to reduce oscillations. Additionally, I would
absolutely not recommend using the I, or integral, part of the PID for this. Using integral in
drivetrain PIDs is already not ideal, but it will continuously build up error in this PID, causing
major issues when it gets too strong. Don't use I; P and D are enough. In the versions of Pedro Pathing
from after late July 2024, there is a Kalman filter on the drive error and the drive PID has a
filter as well. These smooth out the drive error and PID response so that there is not as much
oscillation during the braking portion of each path. The Kalman filter is tuned to have 6 for the
model covariance and 1 for the data covariance. These values should work, but if you feel inclined
to tune the Kalman filter yourself, a higher ratio of model covariance to data covariance means that
the filter will rely more on its previous output rather than the data, and the opposite ratio will
mean that the filter will rely more so on the data input (the raw drive error) rather than the model.
The filtered PID functions like a normal PID, except the derivative term is a weighted average of the
current derivative and the previous derivative. Currently, the weighting, or time constant for the
drive filtered PID is 0.6, so the derivative output is 0.6 times the previous derivative plus 0.4
times the current derivative. Feel free to mess around with these values and find what works best
for your robot!
* Finally, we will want to tune the centripetal force correction. This is a pretty simple tune. Open
up FTC Dashboard and enable everything under the `Follower` tab. Then, run `CurvedBackAndForth`
and turn off its timer. If you notice the robot is correcting towards the inside of the curve
as/after running a path, then increase `centripetalScaling`, which can be found on line `95` of
`FollowerConstants`. If the robot is correcting towards the outside of the curve, then decrease
`centripetalScaling`.
* Once you've found satisfactory tunings for everything, run the robot around in
`StraightBackAndForth`, `CurvedBackAndForth`, or some paths of your own making. There's also
`Circle`, but that's more so for fun than anything else. If you notice something could be improved,
feel free to mess around more with your PIDs. That should be all! If you have any more questions,
refer to the OVERVIEW readme file or the README readme file. Best of luck to your team this season! :)
## Note About the PIDs
In versions of Pedro Pathing before early August 2024, there were 2 PIDs used in the translational,
heading, and drive control. However, now there is only one main PID. The old system can still be used.
Scroll down to the bottom of `FollowerConstants` and set all the booleans from lines `163` to `165`
to true. They should be named `useSecondaryTranslationalPID`, `useSecondaryHeadingPID`, and `useSecondaryDrivePID`.
This will enable the two PID system that Pedro Pathing originally used. From there, scroll
down and all the values pertaining to the secondary PIDs will be there. The two PID system works with
a PID that handles larger errors (the main PID) and a second PID to handle smaller errors (the
secondary PID). The main PID should be tuned to move the error within the secondary PID's range
without providing too much momentum that could cause an overshoot. The secondary PID should be tuned
to correct within its range quickly and accurately while minimizing oscillations.

View File

@ -0,0 +1,62 @@
package org.firstinspires.ftc.teamcode.pedroPathing.examples;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftFrontMotorName;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftRearMotorName;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightFrontMotorName;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightRearMotorName;
import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.hardware.DcMotorEx;
import org.firstinspires.ftc.teamcode.pedroPathing.follower.Follower;
/**
* This is the TeleOpEnhancements OpMode. It is an example usage of the TeleOp enhancements that
* Pedro Pathing is capable of.
*
* @author Anyi Lin - 10158 Scott's Bots
* @author Aaron Yang - 10158 Scott's Bots
* @author Harrison Womack - 10158 Scott's Bots
* @version 1.0, 3/21/2024
*/
@TeleOp(name = "Pedro Pathing TeleOp Enhancements", group = "Test")
public class TeleOpEnhancements extends OpMode {
    private Follower follower;

    private DcMotorEx leftFront;
    private DcMotorEx leftRear;
    private DcMotorEx rightFront;
    private DcMotorEx rightRear;

    /**
     * Builds the Follower, pulls the four drive motors out of the hardware map by the
     * names configured in FollowerConstants, puts every motor into BRAKE mode, and then
     * switches the Follower into its TeleOp drive mode.
     */
    @Override
    public void init() {
        follower = new Follower(hardwareMap);

        leftFront = hardwareMap.get(DcMotorEx.class, leftFrontMotorName);
        leftRear = hardwareMap.get(DcMotorEx.class, leftRearMotorName);
        rightRear = hardwareMap.get(DcMotorEx.class, rightRearMotorName);
        rightFront = hardwareMap.get(DcMotorEx.class, rightFrontMotorName);

        // Apply the same zero-power behavior to the whole drivetrain in one pass.
        DcMotorEx[] drivetrain = {leftFront, leftRear, rightRear, rightFront};
        for (DcMotorEx wheel : drivetrain) {
            wheel.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);
        }

        follower.startTeleopDrive();
    }

    /**
     * Feeds the driver's stick inputs to the Follower each loop. The sticks are negated
     * to convert gamepad conventions into the Follower's movement-vector conventions;
     * follower.update() then applies the drive with centripetal force correction.
     */
    @Override
    public void loop() {
        follower.setTeleOpMovementVectors(-gamepad1.left_stick_y, -gamepad1.left_stick_x, -gamepad1.right_stick_x);
        follower.update();
    }
}

View File

@ -0,0 +1,172 @@
package org.firstinspires.ftc.teamcode.pedroPathing.follower;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.MathFunctions;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector;
/**
 * This is the DriveVectorScaler class. This class takes in input Vectors for driving, heading
 * correction, and translational/centripetal correction and returns an array with wheel powers.
 *
 * @author Anyi Lin - 10158 Scott's Bots
 * @author Aaron Yang - 10158 Scott's Bots
 * @author Harrison Womack - 10158 Scott's Bots
 * @version 1.0, 3/4/2024
 */
public class DriveVectorScaler {
    // Preferred drive vectors, ordered left front, left back, right front, right back.
    // These are normalized to unit magnitude in the constructor.
    private Vector[] mecanumVectors;
    // Upper bound on vector/wheel power magnitudes; clamped to [0, 1] by setMaxPowerScaling.
    private double maxPowerScaling = 1;

    /**
     * This creates a new DriveVectorScaler, which takes in various movement vectors and outputs
     * the wheel drive powers necessary to move in the intended direction, given the true movement
     * vector for the front left mecanum wheel.
     *
     * The left-front and right-back wheels share one roller angle; the left-back and right-front
     * wheels use that angle mirrored about the forward axis (theta -> 2*PI - theta).
     *
     * @param frontLeftVector this is the front left mecanum wheel's preferred drive vector.
     */
    public DriveVectorScaler(Vector frontLeftVector) {
        Vector copiedFrontLeftVector = MathFunctions.normalizeVector(frontLeftVector);
        mecanumVectors = new Vector[]{
                new Vector(copiedFrontLeftVector.getMagnitude(), copiedFrontLeftVector.getTheta()),
                new Vector(copiedFrontLeftVector.getMagnitude(), 2*Math.PI-copiedFrontLeftVector.getTheta()),
                new Vector(copiedFrontLeftVector.getMagnitude(), 2*Math.PI-copiedFrontLeftVector.getTheta()),
                new Vector(copiedFrontLeftVector.getMagnitude(), copiedFrontLeftVector.getTheta())};
    }

    /**
     * This takes in vectors for corrective power, heading power, and pathing power and outputs
     * an Array of four doubles, one for each wheel's motor power.
     *
     * IMPORTANT NOTE: all vector inputs are clamped between 0 and maxPowerScaling inclusive in
     * magnitude.
     *
     * Power is allocated by priority: corrective power first, then heading power, then pathing
     * power — each later vector is scaled down (via findNormalizingScaling) so the per-side sum
     * never exceeds maxPowerScaling.
     *
     * @param correctivePower this Vector includes the centripetal force scaling Vector as well as a
     *                        translational power Vector to correct onto the Bezier curve the Follower
     *                        is following.
     * @param headingPower this Vector points in the direction of the robot's current heading, and
     *                     the magnitude tells the robot how much it should turn and in which
     *                     direction.
     * @param pathingPower this Vector points in the direction the robot needs to go to continue along
     *                     the Path.
     * @param robotHeading this is the current heading of the robot, which is used to calculate how
     *                     much power to allocate to each wheel.
     * @return this returns an Array of doubles with a length of 4, which contains the wheel powers.
     */
    public double[] getDrivePowers(Vector correctivePower, Vector headingPower, Vector pathingPower, double robotHeading) {
        // clamps down the magnitudes of the input vectors
        if (correctivePower.getMagnitude() > maxPowerScaling) correctivePower.setMagnitude(maxPowerScaling);
        if (headingPower.getMagnitude() > maxPowerScaling) headingPower.setMagnitude(maxPowerScaling);
        if (pathingPower.getMagnitude() > maxPowerScaling) pathingPower.setMagnitude(maxPowerScaling);

        // the powers for the wheel vectors
        double [] wheelPowers = new double[4];

        // This contains a copy of the mecanum wheel vectors
        Vector[] mecanumVectorsCopy = new Vector[4];

        // this contains the pathing vectors, one for each side (heading control requires 2)
        Vector[] truePathingVectors = new Vector[2];

        if (correctivePower.getMagnitude() == maxPowerScaling) {
            // corrective power already consumes the entire power budget, so heading and
            // pathing power are dropped and both sides get the corrective vector
            truePathingVectors[0] = MathFunctions.copyVector(correctivePower);
            truePathingVectors[1] = MathFunctions.copyVector(correctivePower);
        } else {
            // corrective power did not take up all the power, so add on heading power.
            // Heading power is subtracted on the left side and added on the right so the
            // differential produces rotation.
            Vector leftSideVector = MathFunctions.subtractVectors(correctivePower, headingPower);
            Vector rightSideVector = MathFunctions.addVectors(correctivePower, headingPower);

            if (leftSideVector.getMagnitude() > maxPowerScaling || rightSideVector.getMagnitude() > maxPowerScaling) {
                // if the combined corrective and heading power exceeds the budget, scale down
                // heading power; the min over +/- heading keeps both sides within bounds
                double headingScalingFactor = Math.min(findNormalizingScaling(correctivePower, headingPower), findNormalizingScaling(correctivePower, MathFunctions.scalarMultiplyVector(headingPower, -1)));
                truePathingVectors[0] = MathFunctions.subtractVectors(correctivePower, MathFunctions.scalarMultiplyVector(headingPower, headingScalingFactor));
                truePathingVectors[1] = MathFunctions.addVectors(correctivePower, MathFunctions.scalarMultiplyVector(headingPower, headingScalingFactor));
            } else {
                // if we're here then we can add on some drive power but scaled down to fit the budget
                Vector leftSideVectorWithPathing = MathFunctions.addVectors(leftSideVector, pathingPower);
                Vector rightSideVectorWithPathing = MathFunctions.addVectors(rightSideVector, pathingPower);

                if (leftSideVectorWithPathing.getMagnitude() > maxPowerScaling || rightSideVectorWithPathing.getMagnitude() > maxPowerScaling) {
                    // too much power now, so we scale down the pathing vector
                    double pathingScalingFactor = Math.min(findNormalizingScaling(leftSideVector, pathingPower), findNormalizingScaling(rightSideVector, pathingPower));
                    truePathingVectors[0] = MathFunctions.addVectors(leftSideVector, MathFunctions.scalarMultiplyVector(pathingPower, pathingScalingFactor));
                    truePathingVectors[1] = MathFunctions.addVectors(rightSideVector, MathFunctions.scalarMultiplyVector(pathingPower, pathingScalingFactor));
                } else {
                    // just add the vectors together and you get the final vector
                    truePathingVectors[0] = MathFunctions.copyVector(leftSideVectorWithPathing);
                    truePathingVectors[1] = MathFunctions.copyVector(rightSideVectorWithPathing);
                }
            }
        }

        // doubling the side vectors before the per-wheel solve; the final normalization
        // below rescales any overshoot back within bounds
        truePathingVectors[0] = MathFunctions.scalarMultiplyVector(truePathingVectors[0], 2.0);
        truePathingVectors[1] = MathFunctions.scalarMultiplyVector(truePathingVectors[1], 2.0);

        for (int i = 0; i < mecanumVectorsCopy.length; i++) {
            // this copies the vectors from mecanumVectors but creates new references for them,
            // then rotates them into field coordinates using the robot's heading
            mecanumVectorsCopy[i] = MathFunctions.copyVector(mecanumVectors[i]);

            mecanumVectorsCopy[i].rotateVector(robotHeading);
        }

        // Cramer's-rule style 2x2 solves: express each side's desired vector as a linear
        // combination of that side's two wheel vectors; the coefficients are the wheel powers.
        wheelPowers[0] = (mecanumVectorsCopy[1].getXComponent()*truePathingVectors[0].getYComponent() - truePathingVectors[0].getXComponent()*mecanumVectorsCopy[1].getYComponent()) / (mecanumVectorsCopy[1].getXComponent()*mecanumVectorsCopy[0].getYComponent() - mecanumVectorsCopy[0].getXComponent()*mecanumVectorsCopy[1].getYComponent());
        wheelPowers[1] = (mecanumVectorsCopy[0].getXComponent()*truePathingVectors[0].getYComponent() - truePathingVectors[0].getXComponent()*mecanumVectorsCopy[0].getYComponent()) / (mecanumVectorsCopy[0].getXComponent()*mecanumVectorsCopy[1].getYComponent() - mecanumVectorsCopy[1].getXComponent()*mecanumVectorsCopy[0].getYComponent());
        wheelPowers[2] = (mecanumVectorsCopy[3].getXComponent()*truePathingVectors[1].getYComponent() - truePathingVectors[1].getXComponent()*mecanumVectorsCopy[3].getYComponent()) / (mecanumVectorsCopy[3].getXComponent()*mecanumVectorsCopy[2].getYComponent() - mecanumVectorsCopy[2].getXComponent()*mecanumVectorsCopy[3].getYComponent());
        wheelPowers[3] = (mecanumVectorsCopy[2].getXComponent()*truePathingVectors[1].getYComponent() - truePathingVectors[1].getXComponent()*mecanumVectorsCopy[2].getYComponent()) / (mecanumVectorsCopy[2].getXComponent()*mecanumVectorsCopy[3].getYComponent() - mecanumVectorsCopy[3].getXComponent()*mecanumVectorsCopy[2].getYComponent());

        double wheelPowerMax = Math.max(Math.max(Math.abs(wheelPowers[0]), Math.abs(wheelPowers[1])), Math.max(Math.abs(wheelPowers[2]), Math.abs(wheelPowers[3])));
        // NOTE(review): when the max exceeds maxPowerScaling, this divides by the max so the
        // largest wheel power becomes 1, not maxPowerScaling — confirm this is intended when
        // maxPowerScaling < 1.
        if (wheelPowerMax > maxPowerScaling) {
            wheelPowers[0] /= wheelPowerMax;
            wheelPowers[1] /= wheelPowerMax;
            wheelPowers[2] /= wheelPowerMax;
            wheelPowers[3] /= wheelPowerMax;
        }

        return wheelPowers;
    }

    /**
     * This takes in two Vectors, one static and one variable, and returns the scaling factor that,
     * when multiplied to the variable Vector, results in magnitude of the sum of the static Vector
     * and the scaled variable Vector being the max power scaling.
     *
     * Solves the quadratic |static + t*variable| = maxPowerScaling for t and returns the
     * positive root.
     *
     * IMPORTANT NOTE: I did not intend for this to be used for anything other than the method above
     * this one in this class, so there will be errors if you input Vectors of length greater than maxPowerScaling,
     * and it will scale up the variable Vector if the magnitude of the sum of the two input Vectors
     * isn't greater than maxPowerScaling. So, just don't use this elsewhere. There's gotta be a better way to do
     * whatever you're trying to do.
     *
     * I know that this is used outside of this class, however, I created this method so I get to
     * use it if I want to. Also, it's only used once outside of the DriveVectorScaler class, and
     * it's used to scale Vectors, as intended.
     *
     * @param staticVector the Vector that is held constant.
     * @param variableVector the Vector getting scaled to make the sum of the input Vectors have a
     *                       magnitude of maxPowerScaling.
     * @return returns the scaling factor for the variable Vector.
     */
    public double findNormalizingScaling(Vector staticVector, Vector variableVector) {
        double a = Math.pow(variableVector.getXComponent(), 2) + Math.pow(variableVector.getYComponent(), 2);
        double b = staticVector.getXComponent() * variableVector.getXComponent() + staticVector.getYComponent() * variableVector.getYComponent();
        double c = Math.pow(staticVector.getXComponent(), 2) + Math.pow(staticVector.getYComponent(), 2) - Math.pow(maxPowerScaling, 2);
        return (-b + Math.sqrt(Math.pow(b, 2) - a*c))/(a);
    }

    /**
     * Sets the maximum power that can be used by the drive vector scaler. Clamped between 0 and 1.
     *
     * @param maxPowerScaling setting the max power scaling
     */
    public void setMaxPowerScaling(double maxPowerScaling) {
        this.maxPowerScaling = MathFunctions.clamp(maxPowerScaling, 0, 1);
    }

    /**
     * Gets the maximum power that can be used by the drive vector scaler. Ranges between 0 and 1.
     *
     * @return returns the max power scaling
     */
    public double getMaxPowerScaling() {
        return maxPowerScaling;
    }
}

View File

@ -0,0 +1,83 @@
package org.firstinspires.ftc.teamcode.pedroPathing.localization;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.hardware.DcMotorEx;
import com.qualcomm.robotcore.hardware.DcMotorSimple;
/**
* This is the Encoder class. This tracks the position of a motor of class DcMotorEx. The motor
* must have an encoder attached. It can also get changes in position.
*
* @author Anyi Lin - 10158 Scott's Bots
* @version 1.0, 4/2/2024
*/
public class Encoder {
    private DcMotorEx motor;
    private double previousPosition;
    private double currentPosition;
    private double multiplier;

    public final static double FORWARD = 1, REVERSE = -1;

    /**
     * Wraps a DcMotorEx so its encoder position can be tracked across update() calls.
     * The direction multiplier starts out as FORWARD and the encoder is reset immediately.
     *
     * @param setMotor the motor this will be tracking
     */
    public Encoder(DcMotorEx setMotor) {
        motor = setMotor;
        multiplier = FORWARD;
        reset();
    }

    /**
     * Sets the direction/multiplier of the Encoder. Passing FORWARD (1) or REVERSE (-1)
     * flips the sign of the reported deltas; any other scalar additionally scales them.
     *
     * @param setMultiplier the multiplier/direction to set
     */
    public void setDirection(double setMultiplier) {
        multiplier = setMultiplier;
    }

    /**
     * Zeroes the hardware encoder and re-reads both tracked positions, then returns the
     * motor to RUN_WITHOUT_ENCODER so it can be powered freely.
     */
    public void reset() {
        motor.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
        previousPosition = motor.getCurrentPosition();
        currentPosition = motor.getCurrentPosition();
        motor.setMode(DcMotor.RunMode.RUN_WITHOUT_ENCODER);
    }

    /**
     * Shifts the current tracked position into the previous slot and reads a fresh
     * position from the hardware.
     */
    public void update() {
        previousPosition = currentPosition;
        currentPosition = motor.getCurrentPosition();
    }

    /**
     * Returns the effective multiplier: the configured multiplier combined with the sign
     * implied by the motor's own direction setting.
     *
     * @return returns the multiplier
     */
    public double getMultiplier() {
        double hardwareSign;
        if (motor.getDirection() == DcMotorSimple.Direction.FORWARD) {
            hardwareSign = 1;
        } else {
            hardwareSign = -1;
        }
        return multiplier * hardwareSign;
    }

    /**
     * Returns the change in position between the last two update()/reset() readings,
     * signed by the effective multiplier. Note this tracks only position deltas, not
     * velocity — the localizer derives velocity itself with an elapsed-time timer.
     *
     * @return returns the change in position of the Encoder
     */
    public double getDeltaPosition() {
        double rawDelta = currentPosition - previousPosition;
        return getMultiplier() * rawDelta;
    }
}

View File

@ -0,0 +1,520 @@
/* MIT License
* Copyright (c) [2024] [Base 10 Assets, LLC]
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package org.firstinspires.ftc.teamcode.pedroPathing.localization;
import static com.qualcomm.robotcore.util.TypeConversion.byteArrayToInt;
import com.qualcomm.hardware.lynx.LynxI2cDeviceSynch;
import com.qualcomm.robotcore.hardware.I2cAddr;
import com.qualcomm.robotcore.hardware.I2cDeviceSynchDevice;
import com.qualcomm.robotcore.hardware.I2cDeviceSynchSimple;
import com.qualcomm.robotcore.hardware.configuration.annotations.DeviceProperties;
import com.qualcomm.robotcore.hardware.configuration.annotations.I2cDeviceType;
import com.qualcomm.robotcore.util.TypeConversion;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.robotcore.external.navigation.DistanceUnit;
import org.firstinspires.ftc.robotcore.external.navigation.Pose2D;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.Arrays;
@I2cDeviceType
@DeviceProperties(
name = "goBILDA® Pinpoint Odometry Computer",
xmlTag = "goBILDAPinpoint",
description ="goBILDA® Pinpoint Odometry Computer (IMU Sensor Fusion for 2 Wheel Odometry)"
)
public class GoBildaPinpointDriver extends I2cDeviceSynchDevice<I2cDeviceSynchSimple> {
private int deviceStatus = 0;
private int loopTime = 0;
private int xEncoderValue = 0;
private int yEncoderValue = 0;
private float xPosition = 0;
private float yPosition = 0;
private float hOrientation = 0;
private float xVelocity = 0;
private float yVelocity = 0;
private float hVelocity = 0;
private static final float goBILDA_SWINGARM_POD = 13.26291192f; //ticks-per-mm for the goBILDA Swingarm Pod
private static final float goBILDA_4_BAR_POD = 19.89436789f; //ticks-per-mm for the goBILDA 4-Bar Pod
//i2c address of the device
public static final byte DEFAULT_ADDRESS = 0x31;
public GoBildaPinpointDriver(I2cDeviceSynchSimple deviceClient, boolean deviceClientIsOwned) {
super(deviceClient, deviceClientIsOwned);
this.deviceClient.setI2cAddress(I2cAddr.create7bit(DEFAULT_ADDRESS));
super.registerArmingStateCallback(false);
}
@Override
public Manufacturer getManufacturer() {
return Manufacturer.Other;
}
@Override
protected synchronized boolean doInitialize() {
((LynxI2cDeviceSynch)(deviceClient)).setBusSpeed(LynxI2cDeviceSynch.BusSpeed.FAST_400K);
return true;
}
@Override
public String getDeviceName() {
return "goBILDA® Pinpoint Odometry Computer";
}
//Register map of the i2c device
private enum Register {
DEVICE_ID (1),
DEVICE_VERSION (2),
DEVICE_STATUS (3),
DEVICE_CONTROL (4),
LOOP_TIME (5),
X_ENCODER_VALUE (6),
Y_ENCODER_VALUE (7),
X_POSITION (8),
Y_POSITION (9),
H_ORIENTATION (10),
X_VELOCITY (11),
Y_VELOCITY (12),
H_VELOCITY (13),
MM_PER_TICK (14),
X_POD_OFFSET (15),
Y_POD_OFFSET (16),
YAW_SCALAR (17),
BULK_READ (18);
private final int bVal;
Register(int bVal){
this.bVal = bVal;
}
}
//Device Status enum that captures the current fault condition of the device
public enum DeviceStatus{
NOT_READY (0),
READY (1),
CALIBRATING (1 << 1),
FAULT_X_POD_NOT_DETECTED (1 << 2),
FAULT_Y_POD_NOT_DETECTED (1 << 3),
FAULT_NO_PODS_DETECTED (1 << 2 | 1 << 3),
FAULT_IMU_RUNAWAY (1 << 4);
private final int status;
DeviceStatus(int status){
this.status = status;
}
}
//enum that captures the direction the encoders are set to
public enum EncoderDirection{
FORWARD,
REVERSED;
}
//enum that captures the kind of goBILDA odometry pods, if goBILDA pods are used
public enum GoBildaOdometryPods {
goBILDA_SWINGARM_POD,
goBILDA_4_BAR_POD;
}
//enum that captures a limited scope of read data. More options may be added in future update
public enum readData {
ONLY_UPDATE_HEADING,
}
// ---- low-level serialization helpers; the device is little-endian throughout ----
/** Writes an int to the i2c device
@param reg the register to write the int to
@param i the integer to write to the register
*/
private void writeInt(final Register reg, int i){
deviceClient.write(reg.bVal, TypeConversion.intToByteArray(i,ByteOrder.LITTLE_ENDIAN));
}
/**
 * Reads an int from a register of the i2c device
 * @param reg the register to read from
 * @return returns an int that contains the value stored in the read register
 */
private int readInt(Register reg){
return byteArrayToInt(deviceClient.read(reg.bVal,4), ByteOrder.LITTLE_ENDIAN);
}
/**
 * Converts a byte array to a float value
 * @param byteArray byte array to transform
 * @param byteOrder order of byte array to convert
 * @return the float value stored by the byte array
 */
private float byteArrayToFloat(byte[] byteArray, ByteOrder byteOrder){
return ByteBuffer.wrap(byteArray).order(byteOrder).getFloat();
}
/**
 * Reads a float from a register
 * @param reg the register to read
 * @return the float value stored in that register
 */
private float readFloat(Register reg){
return byteArrayToFloat(deviceClient.read(reg.bVal,4),ByteOrder.LITTLE_ENDIAN);
}
/**
 * Converts a float to a byte array
 * @param value the float value to convert
 * @param byteOrder the byte order to encode the float with
 * @return the 4-byte array converted from the float
 */
private byte [] floatToByteArray (float value, ByteOrder byteOrder) {
return ByteBuffer.allocate(4).order(byteOrder).putFloat(value).array();
}
/**
 * Writes a byte array to a register on the i2c device
 * @param reg the register to write to
 * @param bytes the byte array to write
 */
private void writeByteArray (Register reg, byte[] bytes){
deviceClient.write(reg.bVal,bytes);
}
/**
 * Writes a float to a register on the i2c device.
 * Delegates to {@link #floatToByteArray} so the float-to-byte conversion lives
 * in exactly one place (this method previously duplicated the ByteBuffer logic).
 * @param reg the register to write to
 * @param f the float to write
 */
private void writeFloat (Register reg, float f){
deviceClient.write(reg.bVal, floatToByteArray(f, ByteOrder.LITTLE_ENDIAN));
}
/**
 * Decodes a raw status-register value into a DeviceStatus, using the same
 * precedence as the original implementation: calibrating first, then pod
 * faults (both, X-only, Y-only), then IMU runaway, then ready/not-ready.
 * @param s raw status bits read from the device
 * @return the Odometry Computer state
 */
private DeviceStatus lookupStatus (int s){
    if ((s & DeviceStatus.CALIBRATING.status) != 0){
        return DeviceStatus.CALIBRATING;
    }
    boolean xPodMissing = (s & DeviceStatus.FAULT_X_POD_NOT_DETECTED.status) != 0;
    boolean yPodMissing = (s & DeviceStatus.FAULT_Y_POD_NOT_DETECTED.status) != 0;
    if (xPodMissing && yPodMissing){
        return DeviceStatus.FAULT_NO_PODS_DETECTED;
    } else if (xPodMissing){
        return DeviceStatus.FAULT_X_POD_NOT_DETECTED;
    } else if (yPodMissing){
        return DeviceStatus.FAULT_Y_POD_NOT_DETECTED;
    }
    if ((s & DeviceStatus.FAULT_IMU_RUNAWAY.status) != 0){
        return DeviceStatus.FAULT_IMU_RUNAWAY;
    }
    return ((s & DeviceStatus.READY.status) != 0) ? DeviceStatus.READY : DeviceStatus.NOT_READY;
}
/**
 * Call this once per loop to read new data from the Odometry Computer. Data will only update once this is called.
 */
public void update(){
// One 40-byte bulk read covers registers DEVICE_STATUS through H_VELOCITY.
// The byte offsets below must mirror the firmware's BULK_READ layout exactly:
// four little-endian ints, then six little-endian floats.
byte[] bArr = deviceClient.read(Register.BULK_READ.bVal, 40);
deviceStatus = byteArrayToInt(Arrays.copyOfRange (bArr, 0, 4), ByteOrder.LITTLE_ENDIAN);
loopTime = byteArrayToInt(Arrays.copyOfRange (bArr, 4, 8), ByteOrder.LITTLE_ENDIAN);
xEncoderValue = byteArrayToInt(Arrays.copyOfRange (bArr, 8, 12), ByteOrder.LITTLE_ENDIAN);
yEncoderValue = byteArrayToInt(Arrays.copyOfRange (bArr, 12,16), ByteOrder.LITTLE_ENDIAN);
xPosition = byteArrayToFloat(Arrays.copyOfRange(bArr, 16,20), ByteOrder.LITTLE_ENDIAN);
yPosition = byteArrayToFloat(Arrays.copyOfRange(bArr, 20,24), ByteOrder.LITTLE_ENDIAN);
hOrientation = byteArrayToFloat(Arrays.copyOfRange(bArr, 24,28), ByteOrder.LITTLE_ENDIAN);
xVelocity = byteArrayToFloat(Arrays.copyOfRange(bArr, 28,32), ByteOrder.LITTLE_ENDIAN);
yVelocity = byteArrayToFloat(Arrays.copyOfRange(bArr, 32,36), ByteOrder.LITTLE_ENDIAN);
hVelocity = byteArrayToFloat(Arrays.copyOfRange(bArr, 36,40), ByteOrder.LITTLE_ENDIAN);
}
/**
 * Call this once per loop to read new data from the Odometry Computer. This is an override of the update() function
 * which allows a narrower range of data to be read from the device for faster read times. Currently ONLY_UPDATE_HEADING
 * is supported.
 * @param data GoBildaPinpointDriver.readData.ONLY_UPDATE_HEADING
 */
public void update(readData data) {
if (data == readData.ONLY_UPDATE_HEADING) {
// Single 4-byte read instead of the 40-byte bulk read; only hOrientation is refreshed.
hOrientation = byteArrayToFloat(deviceClient.read(Register.H_ORIENTATION.bVal, 4), ByteOrder.LITTLE_ENDIAN);
}
}
/**
 * Sets the odometry pod positions relative to the point that the odometry computer tracks around.<br><br>
 * The most common tracking position is the center of the robot. <br> <br>
 * The X pod offset refers to how far sideways (in mm) from the tracking point the X (forward) odometry pod is. Left of the center is a positive number, right of center is a negative number. <br>
 * the Y pod offset refers to how far forwards (in mm) from the tracking point the Y (strafe) odometry pod is. forward of center is a positive number, backwards is a negative number.<br>
 * @param xOffset how sideways from the center of the robot is the X (forward) pod? Left increases
 * @param yOffset how far forward from the center of the robot is the Y (Strafe) pod? forward increases
 */
public void setOffsets(double xOffset, double yOffset){
writeFloat(Register.X_POD_OFFSET, (float) xOffset);
writeFloat(Register.Y_POD_OFFSET, (float) yOffset);
}
/**
 * Recalibrates the Odometry Computer's internal IMU. <br><br>
 * <strong> Robot MUST be stationary </strong> <br><br>
 * Device takes a large number of samples, and uses those as the gyroscope zero-offset. This takes approximately 0.25 seconds.
 */
// Bit 0 of DEVICE_CONTROL triggers the IMU recalibration command.
public void recalibrateIMU(){writeInt(Register.DEVICE_CONTROL,1<<0);}
/**
 * Resets the current position to 0,0,0 and recalibrates the Odometry Computer's internal IMU. <br><br>
 * <strong> Robot MUST be stationary </strong> <br><br>
 * Device takes a large number of samples, and uses those as the gyroscope zero-offset. This takes approximately 0.25 seconds.
 */
// Bit 1 of DEVICE_CONTROL triggers the combined position-reset + IMU recalibration command.
public void resetPosAndIMU(){writeInt(Register.DEVICE_CONTROL,1<<1);}
/**
 * Can reverse the direction of each encoder.
 * @param xEncoder FORWARD or REVERSED, X (forward) pod should increase when the robot is moving forward
 * @param yEncoder FORWARD or REVERSED, Y (strafe) pod should increase when the robot is moving left
 */
public void setEncoderDirections(EncoderDirection xEncoder, EncoderDirection yEncoder){
// NOTE(review): each writeInt below replaces the entire DEVICE_CONTROL register
// with a single bit rather than OR-ing bits together, so the Y write does not
// preserve the X bit at the register level. Presumably the firmware treats each
// bit as a one-shot command rather than a persistent flag — TODO confirm with
// the goBILDA register documentation.
if (xEncoder == EncoderDirection.FORWARD){
writeInt(Register.DEVICE_CONTROL,1<<5);
}
if (xEncoder == EncoderDirection.REVERSED) {
writeInt(Register.DEVICE_CONTROL,1<<4);
}
if (yEncoder == EncoderDirection.FORWARD){
writeInt(Register.DEVICE_CONTROL,1<<3);
}
if (yEncoder == EncoderDirection.REVERSED){
writeInt(Register.DEVICE_CONTROL,1<<2);
}
}
/**
 * If you're using goBILDA odometry pods, the ticks-per-mm values are stored here for easy access.<br><br>
 * @param pods goBILDA_SWINGARM_POD or goBILDA_4_BAR_POD
 */
public void setEncoderResolution(GoBildaOdometryPods pods){
// goBILDA_SWINGARM_POD / goBILDA_4_BAR_POD here refer to ticks-per-mm float
// constants declared earlier in this class (outside this excerpt), not to the
// enum values of the same name used in the parameter comparison.
if (pods == GoBildaOdometryPods.goBILDA_SWINGARM_POD) {
writeByteArray(Register.MM_PER_TICK, (floatToByteArray(goBILDA_SWINGARM_POD, ByteOrder.LITTLE_ENDIAN)));
}
if (pods == GoBildaOdometryPods.goBILDA_4_BAR_POD){
writeByteArray(Register.MM_PER_TICK,(floatToByteArray(goBILDA_4_BAR_POD, ByteOrder.LITTLE_ENDIAN)));
}
}
/**
 * Sets the encoder resolution in ticks per mm of the odometry pods. <br>
 * You can find this number by dividing the counts-per-revolution of your encoder by the circumference of the wheel.
 * @param ticks_per_mm should be somewhere between 10 ticks/mm and 100 ticks/mm a goBILDA Swingarm pod is ~13.26291192
 */
public void setEncoderResolution(double ticks_per_mm){
writeByteArray(Register.MM_PER_TICK,(floatToByteArray((float) ticks_per_mm,ByteOrder.LITTLE_ENDIAN)));
}
/**
 * Tuning this value should be unnecessary.<br>
 * The goBILDA Odometry Computer has a per-device tuned yaw offset already applied when you receive it.<br><br>
 * This is a scalar that is applied to the gyro's yaw value. Increasing it will mean it will report more than one degree for every degree the sensor fusion algorithm measures. <br><br>
 * You can tune this variable by rotating the robot a large amount (10 full turns is a good starting place) and comparing the amount that the robot rotated to the amount measured.
 * Rotating the robot exactly 10 times should measure 3600°. If it measures more or less, divide moved amount by the measured amount and apply that value to the Yaw Offset.<br><br>
 * If you find that to get an accurate heading number you need to apply a scalar of more than 1.05, or less than 0.95, your device may be bad. Please reach out to tech@gobilda.com
 * @param yawOffset A scalar for the robot's heading.
 */
public void setYawScalar(double yawOffset){
writeByteArray(Register.YAW_SCALAR,(floatToByteArray((float) yawOffset, ByteOrder.LITTLE_ENDIAN)));
}
/**
 * Send a position that the Pinpoint should use to track your robot relative to. You can use this to
 * update the estimated position of your robot with new external sensor data, or to run a robot
 * in field coordinates. <br><br>
 * This overrides the current position. <br><br>
 * <strong>Using this feature to track your robot's position in field coordinates:</strong> <br>
 * When you start your code, send a Pose2D that describes the starting position on the field of your robot. <br>
 * Say you're on the red alliance, your robot is against the wall and closer to the audience side,
 * and the front of your robot is pointing towards the center of the field.
 * You can send a setPosition with something like -600mm x, -1200mm Y, and 90 degrees. The pinpoint would then always
 * keep track of how far away from the center of the field you are. <br><br>
 * <strong>Using this feature to update your position with additional sensors: </strong><br>
 * Some robots have a secondary way to locate their robot on the field. This is commonly
 * Apriltag localization in FTC, but it can also be something like a distance sensor.
 * Often these external sensors are absolute (meaning they measure something about the field)
 * so their data is very accurate. But they can be slower to read, or you may need to be in a very specific
 * position on the field to use them. In that case, spend most of your time relying on the Pinpoint
 * to determine your location. Then when you pull a new position from your secondary sensor,
 * send a setPosition command with the new position. The Pinpoint will then track your movement
 * relative to that new, more accurate position.
 * @param pos a Pose2D describing the robot's new position.
 * @return the same Pose2D that was passed in, for convenient chaining.
 */
public Pose2D setPosition(Pose2D pos){
// The device expects millimeters for X/Y and radians for heading.
writeByteArray(Register.X_POSITION,(floatToByteArray((float) pos.getX(DistanceUnit.MM), ByteOrder.LITTLE_ENDIAN)));
writeByteArray(Register.Y_POSITION,(floatToByteArray((float) pos.getY(DistanceUnit.MM),ByteOrder.LITTLE_ENDIAN)));
writeByteArray(Register.H_ORIENTATION,(floatToByteArray((float) pos.getHeading(AngleUnit.RADIANS),ByteOrder.LITTLE_ENDIAN)));
return pos;
}
/**
 * Checks the deviceID of the Odometry Computer. Should return 1.
 * @return 1 if device is functional.
 */
public int getDeviceID(){return readInt(Register.DEVICE_ID);}
/**
 * @return the firmware version of the Odometry Computer
 */
public int getDeviceVersion(){return readInt(Register.DEVICE_VERSION); }
/**
 * Reads the current yaw scalar from the device.
 * <strong> This uses its own I2C read, avoid calling this every loop. </strong>
 * @return the yaw scalar currently applied by the device
 */
public float getYawScalar(){return readFloat(Register.YAW_SCALAR); }
/**
 * Device Status stores any faults the Odometry Computer may be experiencing. These faults include:
 * @return one of the following states:<br>
 * NOT_READY - The device is currently powering up. And has not initialized yet. RED LED<br>
 * READY - The device is currently functioning as normal. GREEN LED<br>
 * CALIBRATING - The device is currently recalibrating the gyro. RED LED<br>
 * FAULT_NO_PODS_DETECTED - the device does not detect any pods plugged in. PURPLE LED <br>
 * FAULT_X_POD_NOT_DETECTED - The device does not detect an X pod plugged in. BLUE LED <br>
 * FAULT_Y_POD_NOT_DETECTED - The device does not detect a Y pod plugged in. ORANGE LED <br>
 */
// Decodes the status int cached by the last update() call — no extra I2C traffic.
public DeviceStatus getDeviceStatus(){return lookupStatus(deviceStatus); }
/**
 * Checks the Odometry Computer's most recent loop time.<br><br>
 * If values less than 500, or more than 1100 are commonly seen here, there may be something wrong with your device. Please reach out to tech@gobilda.com
 * @return loop time in microseconds (1/1,000,000 seconds)
 */
public int getLoopTime(){return loopTime; }
/**
 * Checks the Odometry Computer's most recent loop frequency.<br><br>
 * If values less than 900, or more than 2000 are commonly seen here, there may be something wrong with your device. Please reach out to tech@gobilda.com
 * @return Pinpoint Frequency in Hz (loops per second),
 */
public double getFrequency(){
// Guard against divide-by-zero before the first successful update().
if (loopTime != 0){
return 1000000.0/loopTime;
}
else {
return 0;
}
}
// ---- accessors for values cached by update(); the two offset getters at the
// ---- bottom perform their own live I2C reads instead.
/**
 * @return the raw value of the X (forward) encoder in ticks
 */
public int getEncoderX(){return xEncoderValue; }
/**
 * @return the raw value of the Y (strafe) encoder in ticks
 */
public int getEncoderY(){return yEncoderValue; }
/**
 * @return the estimated X (forward) position of the robot in mm
 */
public double getPosX(){return xPosition; }
/**
 * @return the estimated Y (Strafe) position of the robot in mm
 */
public double getPosY(){return yPosition; }
/**
 * @return the estimated H (heading) position of the robot in Radians
 */
public double getHeading(){return hOrientation;}
/**
 * @return the estimated X (forward) velocity of the robot in mm/sec
 */
public double getVelX(){return xVelocity; }
/**
 * @return the estimated Y (strafe) velocity of the robot in mm/sec
 */
public double getVelY(){return yVelocity; }
/**
 * @return the estimated H (heading) velocity of the robot in radians/sec
 */
public double getHeadingVelocity(){return hVelocity; }
/**
 * <strong> This uses its own I2C read, avoid calling this every loop. </strong>
 * @return the user-set offset for the X (forward) pod
 */
public float getXOffset(){return readFloat(Register.X_POD_OFFSET);}
/**
 * <strong> This uses its own I2C read, avoid calling this every loop. </strong>
 * @return the user-set offset for the Y (strafe) pod
 */
public float getYOffset(){return readFloat(Register.Y_POD_OFFSET);}
/**
 * @return a Pose2D containing the estimated position of the robot
 * (x/y in mm, heading in radians, as cached by the last update() call)
 */
public Pose2D getPosition(){
return new Pose2D(DistanceUnit.MM,
xPosition,
yPosition,
AngleUnit.RADIANS,
hOrientation);
}
/**
 * @return a Pose2D containing the estimated velocity of the robot, velocity is unit per second
 * (x/y in mm/sec, heading in radians/sec, as cached by the last update() call)
 */
public Pose2D getVelocity(){
return new Pose2D(DistanceUnit.MM,
xVelocity,
yVelocity,
AngleUnit.RADIANS,
hVelocity);
}
}

View File

@ -0,0 +1,105 @@
package org.firstinspires.ftc.teamcode.pedroPathing.localization;
import com.qualcomm.robotcore.hardware.IMU;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector;
/**
* This is the Localizer class. It is an abstract superclass of all localizers used in Pedro Pathing,
* so it contains abstract methods that will have a concrete implementation in the subclasses. Any
* method that all localizers will need will be in this class.
*
* @author Anyi Lin - 10158 Scott's Bots
* @version 1.0, 4/2/2024
*/
public abstract class Localizer {
/**
 * This returns the current pose estimate from the Localizer.
 *
 * @return returns the pose as a Pose object.
 */
public abstract Pose getPose();
/**
 * This returns the current velocity estimate from the Localizer.
 *
 * @return returns the velocity as a Pose object.
 */
public abstract Pose getVelocity();
/**
 * This returns the current velocity estimate from the Localizer as a Vector.
 *
 * @return returns the velocity as a Vector.
 */
public abstract Vector getVelocityVector();
/**
 * This sets the start pose of the Localizer. Changing the start pose should move the robot as if
 * all its previous movements were displacing it from its new start pose.
 *
 * @param setStart the new start pose
 */
public abstract void setStartPose(Pose setStart);
/**
 * This sets the current pose estimate of the Localizer. Changing this should just change the
 * robot's current pose estimate, not anything to do with the start pose.
 *
 * @param setPose the new current pose estimate
 */
public abstract void setPose(Pose setPose);
/**
 * This calls an update to the Localizer, updating the current pose estimate and current velocity
 * estimate.
 */
public abstract void update();
/**
 * This returns how far the robot has turned in radians, in a number not clamped between 0 and
 * 2 * pi radians. This is used for some tuning things and nothing actually within the following.
 *
 * @return returns how far the robot has turned in total, in radians.
 */
public abstract double getTotalHeading();
/**
 * This returns the multiplier applied to forward movement measurement to convert from encoder
 * ticks to inches. This is found empirically through a tuner.
 *
 * @return returns the forward ticks to inches multiplier
 */
public abstract double getForwardMultiplier();
/**
 * This returns the multiplier applied to lateral/strafe movement measurement to convert from
 * encoder ticks to inches. This is found empirically through a tuner.
 *
 * @return returns the lateral/strafe ticks to inches multiplier
 */
public abstract double getLateralMultiplier();
/**
 * This returns the multiplier applied to turning movement measurement to convert from encoder
 * ticks to radians. This is found empirically through a tuner.
 *
 * @return returns the turning ticks to radians multiplier
 */
public abstract double getTurningMultiplier();
/**
 * This resets the IMU of the localizer, if applicable.
 *
 * @throws InterruptedException if the reset is interrupted while waiting on the hardware
 */
public abstract void resetIMU() throws InterruptedException;
/**
 * This is overridden to return the IMU, if there is one.
 *
 * @return returns the IMU if it exists; this base implementation returns
 * {@code null}, so callers must null-check the result.
 */
public IMU getIMU() {
return null;
}
}

View File

@ -0,0 +1,273 @@
package org.firstinspires.ftc.teamcode.pedroPathing.localization;
import java.util.Arrays;
/**
* This is the Matrix class. This defines matrices, primarily for use in the localizers. However, if
* matrices and matrix operations are necessary, this class as well as some operations in the
* MathFunctions class can absolutely be used there as well. It's similar to Mats in OpenCV if you've
* used them before, but with more limited functionality.
*
* @author Anyi Lin - 10158 Scott's Bots
* @version 1.0, 4/2/2024
*/
public class Matrix {
private double[][] matrix;
/**
* This creates a new Matrix of width and height 0.
*/
public Matrix() {
matrix = new double[0][0];
}
/**
* This creates a new Matrix with a specified width and height.
*
* @param rows the number of rows, or height
* @param columns the number of columns, or width
*/
public Matrix(int rows, int columns) {
matrix = new double[rows][columns];
}
/**
* This creates a new Matrix from a 2D matrix of doubles. Please only enter rectangular 2D
* Arrays of doubles or else things mess up.
*
* @param setMatrix the 2D Array of doubles
*/
public Matrix(double[][] setMatrix) {
setMatrix(setMatrix);
}
/**
* This creates a new Matrix from another Matrix.
*
* @param setMatrix the Matrix input.
*/
public Matrix(Matrix setMatrix) {
setMatrix(setMatrix);
}
/**
* This creates a copy of a 2D Array of doubles that references entirely new memory locations
* from the original 2D Array of doubles, so no issues with mutability.
*
* @param copyMatrix the 2D Array of doubles to copy
* @return returns a deep copy of the input Array
*/
public static double[][] deepCopy(double[][] copyMatrix) {
double[][] returnMatrix = new double[copyMatrix.length][copyMatrix[0].length];
for (int i = 0; i < copyMatrix.length; i++) {
returnMatrix[i] = Arrays.copyOf(copyMatrix[i], copyMatrix[i].length);
}
return returnMatrix;
}
/**
* This returns a deep copy of the 2D Array that this Matrix is based on.
*
* @return returns the 2D Array of doubles this Matrix is built on
*/
public double[][] getMatrix() {
return deepCopy(matrix);
}
/**
* This returns a specified row of the Matrix in the form of an Array of doubles.
*
* @param row the index of the row to return
* @return returns the row of the Matrix specified
*/
public double[] get(int row) {
return Arrays.copyOf(matrix[row], matrix[row].length);
}
/**
* This returns a specified element of the Matrix as a double.
*
* @param row the index of the row of the element
* @param column the index of the column of the element
* @return returns the element of the Matrix specified
*/
public double get(int row, int column) {
return get(row)[column];
}
/**
* This returns the number of rows of the Matrix, as determined by the length of the 2D Array.
* If the Matrix/2D Array is not rectangular, issues arise.
*
* @return returns the number of rows in the Matrix
*/
public int getRows() {
return matrix.length;
}
/**
* This returns the number of columns of the Matrix, as determined by the length of the first Array
* in the 2D Array. If the Matrix/2D Array is not rectangular, issues arise.
*
* @return returns the number of columns in the Matrix
*/
public int getColumns() {
return matrix[0].length;
}
/**
* This sets the 2D Array of this Matrix to a copy of the 2D Array of another Matrix.
*
* @param setMatrix the Matrix to copy from
* @return returns if the operation was successful
*/
public boolean setMatrix(Matrix setMatrix) {
return setMatrix(setMatrix.getMatrix());
}
/**
* This sets the 2D Array of this Matrix to a copy of a specified 2D Array.
*
* @param setMatrix the 2D Array to copy from
* @return returns if the operation was successful
*/
public boolean setMatrix(double[][] setMatrix) {
int columns = setMatrix[0].length;
for (int i = 0; i < setMatrix.length; i++) {
if (setMatrix[i].length != columns) {
return false;
}
}
matrix = deepCopy(setMatrix);
return true;
}
/**
* This sets a row of the Matrix to a copy of a specified Array of doubles.
*
* @param row the row to be written over
* @param input the Array input
* @return returns if the operation was successful
*/
public boolean set(int row, double[] input) {
if (input.length != getColumns()) {
return false;
}
matrix[row] = Arrays.copyOf(input, input.length);
return true;
}
/**
* This sets a specified element of the Matrix to an input value.
*
* @param row the index of the row of the specified element
* @param column the index of the column of the specified element
* @param input the input value
* @return returns if the operation was successful
*/
public boolean set(int row, int column, double input) {
matrix[row][column] = input;
return true;
}
/**
* This adds a Matrix to this Matrix.
*
* @param input the Matrix to add to this. Nothing will change in this Matrix
* @return returns if the operation was successful
*/
public boolean add(Matrix input) {
if (input.getRows() == getRows() && input.getColumns() == getColumns()) {
for (int i = 0; i < getRows(); i++) {
for (int j = 0; j < getColumns(); j++) {
set(i, j, get(i,j) + input.get(i,j));
}
}
return true;
}
return false;
}
/**
* This subtracts a Matrix from this Matrix.
*
* @param input the Matrix to subtract from this. Nothing will change in this Matrix
* @return returns if the operation was successful
*/
public boolean subtract(Matrix input) {
if (input.getRows() == getRows() && input.getColumns() == getColumns()) {
for (int i = 0; i < getRows(); i++) {
for (int j = 0; j < getColumns(); j++) {
set(i, j, get(i,j) - input.get(i,j));
}
}
return true;
}
return false;
}
/**
* This multiplies this Matrix with a scalar.
*
* @param scalar the scalar number
* @return returns if the operation was successful
*/
public boolean scalarMultiply(double scalar) {
for (int i = 0; i < getRows(); i++) {
for (int j = 0; j < getColumns(); j++) {
set(i, j, scalar * get(i,j));
}
}
return true;
}
/**
* This multiplies the Matrix by -1, flipping the signs of all the elements within.
*
* @return returns if the operation was successful
*/
public boolean flipSigns() {
return scalarMultiply(-1);
}
/**
* This multiplies a Matrix to this Matrix.
*
* @param input the Matrix to multiply to this. Nothing will change in this Matrix
* @return returns if the operation was successful
*/
public boolean multiply(Matrix input) {
if (getColumns() == input.getRows()) {
Matrix product = new Matrix(getRows(), input.getColumns());
for (int i = 0; i < product.getRows(); i++) {
for (int j = 0; j < product.getColumns(); j++) {
double value = 0;
for (int k = 0; k < get(i).length; k++) {
value += get(i, k) * input.get(k, j);
}
product.set(i, j, value);
}
}
setMatrix(product);
return true;
}
return false;
}
/**
* This multiplies a Matrix to another Matrix. This will not change any data in the two input
* Matrices.
*
* @param one the first Matrix to multiply.
* @param two the second Matrix to multiply
* @return returns if the operation was successful
*/
public static Matrix multiply(Matrix one, Matrix two) {
Matrix returnMatrix = new Matrix(one);
if (returnMatrix.multiply(two)) {
return returnMatrix;
} else {
return new Matrix();
}
}
}

View File

@ -0,0 +1,219 @@
package org.firstinspires.ftc.teamcode.pedroPathing.localization;
import androidx.annotation.NonNull;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.MathFunctions;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector;
/**
* This is the Pose class. It defines poses in 2D space, like the Pose2D class in Road Runner except
* in the Pedro Pathing code so I don't have to import the Road Runner library. A Pose consists of
* two coordinates defining a position and a third value for the heading, so basically just defining
* any position and orientation the robot can be at, unless your robot can fly for whatever reason.
*
* @author Anyi Lin - 10158 Scott's Bots
* @version 1.0, 4/2/2024
*/
public class Pose {
    private double x;
    private double y;
    private double heading;

    /**
     * Builds a Pose from explicit x, y, and heading values. The heading is
     * normalized on assignment via setHeading.
     *
     * @param setX the initial x value
     * @param setY the initial y value
     * @param setHeading the initial heading value
     */
    public Pose(double setX, double setY, double setHeading) {
        setX(setX);
        setY(setY);
        setHeading(setHeading);
    }

    /**
     * Builds a Pose at (x, y) with heading 0.
     *
     * @param setX the initial x value
     * @param setY the initial y value
     */
    public Pose(double setX, double setY) {
        this(setX, setY, 0);
    }

    /**
     * Builds a Pose at the origin with heading 0.
     */
    public Pose() {
        this(0, 0, 0);
    }

    /**
     * Assigns the x coordinate.
     *
     * @param set the x value
     */
    public void setX(double set) {
        this.x = set;
    }

    /**
     * Assigns the y coordinate.
     *
     * @param set the y value
     */
    public void setY(double set) {
        this.y = set;
    }

    /**
     * Assigns the heading, normalized through MathFunctions.normalizeAngle.
     *
     * @param set the heading value
     */
    public void setHeading(double set) {
        this.heading = MathFunctions.normalizeAngle(set);
    }

    /**
     * Reports the x coordinate.
     *
     * @return the x value
     */
    public double getX() {
        return this.x;
    }

    /**
     * Reports the y coordinate.
     *
     * @return the y value
     */
    public double getY() {
        return this.y;
    }

    /**
     * Reports the (normalized) heading.
     *
     * @return the heading value
     */
    public double getHeading() {
        return this.heading;
    }

    /**
     * Views this Pose's position as a Vector; the heading cannot be carried
     * along and is dropped.
     *
     * @return the position component of this Pose as a Vector
     */
    public Vector getVector() {
        Vector asVector = new Vector();
        asVector.setOrthogonalComponents(this.x, this.y);
        return asVector;
    }

    /**
     * Produces a unit Vector pointing along this Pose's heading.
     *
     * @return a magnitude-1 Vector in the direction of the heading
     */
    public Vector getHeadingVector() {
        return new Vector(1, this.heading);
    }

    /**
     * Accumulates another Pose into this one component-wise; the heading sum is
     * re-normalized by the setter. The input Pose is not modified.
     *
     * @param pose the Pose to add
     */
    public void add(Pose pose) {
        setX(this.x + pose.getX());
        setY(this.y + pose.getY());
        setHeading(this.heading + pose.getHeading());
    }

    /**
     * Removes another Pose from this one component-wise; the heading difference
     * is re-normalized by the setter. The input Pose is not modified.
     *
     * @param pose the Pose to subtract
     */
    public void subtract(Pose pose) {
        setX(this.x - pose.getX());
        setY(this.y - pose.getY());
        setHeading(this.heading - pose.getHeading());
    }

    /**
     * Scales every component of this Pose by a scalar (heading re-normalized).
     *
     * @param scalar the scalar
     */
    public void scalarMultiply(double scalar) {
        setX(scalar * this.x);
        setY(scalar * this.y);
        setHeading(scalar * this.heading);
    }

    /**
     * Divides every component of this Pose by a scalar (heading re-normalized).
     *
     * @param scalar the scalar
     */
    public void scalarDivide(double scalar) {
        setX(this.x / scalar);
        setY(this.y / scalar);
        setHeading(this.heading / scalar);
    }

    /**
     * Negates every component of this Pose. The heading still passes through
     * the normalizing setter, so it stays in the normalized range.
     */
    public void flipSigns() {
        scalarMultiply(-1);
    }

    /**
     * Tests whether another Pose lies within the given tolerance of this one in
     * x, y, and (smallest-angle) heading difference.
     *
     * @param pose the Pose to compare against
     * @param accuracy the tolerance required to return true
     * @return whether the input Pose is within the tolerance of this Pose
     */
    public boolean roughlyEquals(Pose pose, double accuracy) {
        return MathFunctions.roughlyEquals(this.x, pose.getX(), accuracy)
                && MathFunctions.roughlyEquals(this.y, pose.getY(), accuracy)
                && MathFunctions.roughlyEquals(MathFunctions.getSmallestAngleDifference(this.heading, pose.getHeading()), 0, accuracy);
    }

    /**
     * Tests whether another Pose lies within 0.0001 of this one in all values.
     *
     * @param pose the Pose to compare against
     * @return whether the input Pose is within 0.0001 of this Pose
     */
    public boolean roughlyEquals(Pose pose) {
        return roughlyEquals(pose, 0.0001);
    }

    /**
     * Produces an independent copy of this Pose.
     *
     * @return a deep copy of this Pose
     */
    public Pose copy() {
        return new Pose(getX(), getY(), getHeading());
    }

    @NonNull
    @Override
    public String toString() {
        return "(" + getX() + ", " + getY() + ", " + Math.toDegrees(getHeading()) + ")";
    }
}

View File

@ -0,0 +1,357 @@
package org.firstinspires.ftc.teamcode.pedroPathing.localization;
import com.qualcomm.hardware.lynx.LynxModule;
import com.qualcomm.robotcore.hardware.HardwareMap;
import com.qualcomm.robotcore.hardware.IMU;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.localizers.ThreeWheelIMULocalizer;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.localizers.ThreeWheelLocalizer;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.localizers.TwoWheelLocalizer;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.MathFunctions;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector;
/**
* This is the PoseUpdater class. This class handles getting pose data from the localizer and returning
* the information in a useful way to the Follower.
*
* @author Anyi Lin - 10158 Scott's Bots
* @author Aaron Yang - 10158 Scott's Bots
* @author Harrison Womack - 10158 Scott's Bots
* @version 1.0, 3/4/2024
*/
public class PoseUpdater {
private HardwareMap hardwareMap;
// IMU handle borrowed from the localizer; may be null (Localizer.getIMU can return null).
private IMU imu;
private Localizer localizer;
private Pose startingPose = new Pose(0,0,0);
// currentPose/currentVelocity/currentAcceleration are set to null by update()
// as a cache-invalidation sentinel (see update()).
private Pose currentPose = startingPose;
private Pose previousPose = startingPose;
private Vector currentVelocity = new Vector();
private Vector previousVelocity = new Vector();
private Vector currentAcceleration = new Vector();
// Trim-style offsets applied on top of the localizer's raw pose.
private double xOffset = 0;
private double yOffset = 0;
private double headingOffset = 0;
// Timestamps (System.nanoTime) of the previous and current update() calls.
private long previousPoseTime;
private long currentPoseTime;
/**
 * Creates a new PoseUpdater from a HardwareMap and a Localizer.
 * Side effect: switches every Lynx module to AUTO bulk caching to cut down
 * on hardware reads per loop.
 *
 * @param hardwareMap the HardwareMap
 * @param localizer the Localizer
 */
public PoseUpdater(HardwareMap hardwareMap, Localizer localizer) {
this.hardwareMap = hardwareMap;
for (LynxModule module : hardwareMap.getAll(LynxModule.class)) {
module.setBulkCachingMode(LynxModule.BulkCachingMode.AUTO);
}
this.localizer = localizer;
// May be null if the localizer has no IMU (Localizer.getIMU defaults to null).
imu = localizer.getIMU();
}
/**
 * Creates a new PoseUpdater from a HardwareMap, using the default localizer below.
 *
 * @param hardwareMap the HardwareMap
 */
public PoseUpdater(HardwareMap hardwareMap) {
// TODO: replace the second argument with your preferred localizer
this(hardwareMap, new ThreeWheelLocalizer(hardwareMap));
}
/**
 * This updates the robot's pose, as well as updating the previous pose, velocity, and
 * acceleration. The cache for the current pose, velocity, and acceleration is cleared, and
 * the time stamps are updated as well.
 */
public void update() {
// Snapshot the outgoing state before invalidating it.
previousVelocity = getVelocity();
previousPose = applyOffset(getRawPose());
// null acts as a "stale" sentinel; presumably the corresponding getters
// recompute lazily when they see null (getter bodies are outside this excerpt).
currentPose = null;
currentVelocity = null;
currentAcceleration = null;
previousPoseTime = currentPoseTime;
currentPoseTime = System.nanoTime();
localizer.update();
}
/**
* This sets the starting pose. Do not run this after moving at all.
*
* @param set the Pose to set the starting pose to.
*/
public void setStartingPose(Pose set) {
startingPose = set;
previousPose = startingPose;
previousPoseTime = System.nanoTime();
currentPoseTime = System.nanoTime();
localizer.setStartPose(set);
}
/**
* This sets the current pose, using offsets. Think of using offsets as setting trim in an
* aircraft. This can be reset as well, so beware of using the resetOffset() method.
*
*
* @param set The pose to set the current pose to.
*/
public void setCurrentPoseWithOffset(Pose set) {
Pose currentPose = getRawPose();
setXOffset(set.getX() - currentPose.getX());
setYOffset(set.getY() - currentPose.getY());
setHeadingOffset(MathFunctions.getTurnDirection(currentPose.getHeading(), set.getHeading()) * MathFunctions.getSmallestAngleDifference(currentPose.getHeading(), set.getHeading()));
}
/**
* This sets the offset for only the x position.
*
* @param offset This sets the offset.
*/
public void setXOffset(double offset) {
xOffset = offset;
}
/**
* This sets the offset for only the y position.
*
* @param offset This sets the offset.
*/
public void setYOffset(double offset) {
yOffset = offset;
}
/**
* This sets the offset for only the heading.
*
* @param offset This sets the offset.
*/
public void setHeadingOffset(double offset) {
headingOffset = offset;
}
/**
* This returns the x offset.
*
* @return returns the x offset.
*/
public double getXOffset() {
return xOffset;
}
/**
* This returns the y offset.
*
* @return returns the y offset.
*/
public double getYOffset() {
return yOffset;
}
/**
* This returns the heading offset.
*
* @return returns the heading offset.
*/
public double getHeadingOffset() {
return headingOffset;
}
/**
* This applies the offset to a specified Pose.
*
* @param pose The pose to be offset.
* @return This returns a new Pose with the offset applied.
*/
public Pose applyOffset(Pose pose) {
return new Pose(pose.getX()+xOffset, pose.getY()+yOffset, pose.getHeading()+headingOffset);
}
/**
* This resets all offsets set to the PoseUpdater. If you have reset your pose using the
* setCurrentPoseUsingOffset(Pose2d set) method, then your pose will be returned to what the
* PoseUpdater thinks your pose would be, not the pose you reset to.
*/
public void resetOffset() {
setXOffset(0);
setYOffset(0);
setHeadingOffset(0);
}
/**
* This returns the current pose, with offsets applied. If this is called multiple times in
* a single update, the current pose is cached so that subsequent calls don't have to repeat
* localizer calls or calculations.
*
* @return returns the current pose.
*/
public Pose getPose() {
if (currentPose == null) {
currentPose = localizer.getPose();
return applyOffset(currentPose);
} else {
return applyOffset(currentPose);
}
}
/**
* This returns the current raw pose, without any offsets applied. If this is called multiple times in
* a single update, the current pose is cached so that subsequent calls don't have to repeat
* localizer calls or calculations.
*
* @return returns the raw pose.
*/
public Pose getRawPose() {
if (currentPose == null) {
currentPose = localizer.getPose();
return currentPose;
} else {
return currentPose;
}
}
/**
* This sets the current pose without using resettable offsets.
*
* @param set the pose to set the current pose to.
*/
public void setPose(Pose set) {
resetOffset();
localizer.setPose(set);
}
/**
* Returns the robot's pose from the previous update.
*
* @return returns the robot's previous pose.
*/
public Pose getPreviousPose() {
return previousPose;
}
/**
* Returns the robot's change in pose from the previous update.
*
* @return returns the robot's delta pose.
*/
public Pose getDeltaPose() {
Pose returnPose = getPose();
returnPose.subtract(previousPose);
return returnPose;
}
/**
* This returns the velocity of the robot as a Vector. If this is called multiple times in
* a single update, the velocity Vector is cached so that subsequent calls don't have to repeat
* localizer calls or calculations.
*
* @return returns the velocity of the robot.
*/
public Vector getVelocity() {
if (currentVelocity == null) {
// currentVelocity = new Vector();
// currentVelocity.setOrthogonalComponents(getPose().getX() - previousPose.getX(), getPose().getY() - previousPose.getY());
// currentVelocity.setMagnitude(MathFunctions.distance(getPose(), previousPose) / ((currentPoseTime - previousPoseTime) / Math.pow(10.0, 9)));
currentVelocity = localizer.getVelocityVector();
return MathFunctions.copyVector(currentVelocity);
} else {
return MathFunctions.copyVector(currentVelocity);
}
}
/**
* This returns the angular velocity of the robot as a double.
*
* @return returns the angular velocity of the robot.
*/
public double getAngularVelocity() {
return MathFunctions.getTurnDirection(previousPose.getHeading(), getPose().getHeading()) * MathFunctions.getSmallestAngleDifference(getPose().getHeading(), previousPose.getHeading()) / ((currentPoseTime-previousPoseTime)/Math.pow(10.0, 9));
}
/**
* This returns the acceleration of the robot as a Vector. If this is called multiple times in
* a single update, the acceleration Vector is cached so that subsequent calls don't have to
* repeat localizer calls or calculations.
*
* @return returns the acceleration of the robot.
*/
public Vector getAcceleration() {
if (currentAcceleration == null) {
currentAcceleration = MathFunctions.subtractVectors(getVelocity(), previousVelocity);
currentAcceleration.setMagnitude(currentAcceleration.getMagnitude() / ((currentPoseTime - previousPoseTime) / Math.pow(10.0, 9)));
return MathFunctions.copyVector(currentAcceleration);
} else {
return MathFunctions.copyVector(currentAcceleration);
}
}
/**
* This resets the heading of the robot to the IMU's heading, using Road Runner's pose reset.
*/
public void resetHeadingToIMU() {
if (imu != null) {
localizer.setPose(new Pose(getPose().getX(), getPose().getY(), getNormalizedIMUHeading() + startingPose.getHeading()));
}
}
/**
* This resets the heading of the robot to the IMU's heading, using offsets instead of Road
* Runner's pose reset. This way, it's faster, but this can be wiped with the resetOffsets()
* method.
*/
public void resetHeadingToIMUWithOffsets() {
if (imu != null) {
setCurrentPoseWithOffset(new Pose(getPose().getX(), getPose().getY(), getNormalizedIMUHeading() + startingPose.getHeading()));
}
}
/**
* This returns the IMU heading normalized to be between [0, 2 PI] radians.
*
* @return returns the normalized IMU heading.
*/
public double getNormalizedIMUHeading() {
if (imu != null) {
return MathFunctions.normalizeAngle(-imu.getRobotYawPitchRollAngles().getYaw(AngleUnit.RADIANS));
}
return 0;
}
/**
* This returns the total number of radians the robot has turned.
*
* @return the total heading.
*/
public double getTotalHeading() {
return localizer.getTotalHeading();
}
/**
* This returns the Localizer.
*
* @return the Localizer
*/
public Localizer getLocalizer() {
return localizer;
}
/**
*
*/
public void resetIMU() throws InterruptedException {
localizer.resetIMU();
}
}

View File

@ -0,0 +1,46 @@
package com.acmerobotics.roadrunner.ftc
import com.qualcomm.hardware.sparkfun.SparkFunOTOS
import com.qualcomm.robotcore.hardware.I2cDeviceSynch
import com.qualcomm.robotcore.hardware.configuration.annotations.DeviceProperties
import com.qualcomm.robotcore.hardware.configuration.annotations.I2cDeviceType
import java.util.*
@I2cDeviceType
@DeviceProperties(
    name = "SparkFun OTOS Corrected",
    xmlTag = "SparkFunOTOS2",
    description = "SparkFun Qwiic Optical Tracking Odometry Sensor Corrected"
)
class SparkFunOTOSCorrected(deviceClient: I2cDeviceSynch) : SparkFunOTOS(deviceClient) {
    /**
     * Gets only the position and velocity measured by the
     * OTOS in a single burst read
     * @param pos Position measured by the OTOS
     * @param vel Velocity measured by the OTOS
     */
    fun getPosVel(pos: Pose2D, vel: Pose2D) {
        // Read all pose registers
        // 12 bytes = 2 poses (position + velocity) * 3 components (x, y, h) * 2 bytes each
        val rawData = deviceClient.read(REG_POS_XL.toInt(), 12)

        // Convert raw data to pose units
        // Bytes 0-5 are position, bytes 6-11 are velocity; scaling constants come
        // from the SparkFunOTOS base driver.
        pos.set(regsToPose(Arrays.copyOfRange(rawData, 0, 6), INT16_TO_METER, INT16_TO_RAD))
        vel.set(regsToPose(Arrays.copyOfRange(rawData, 6, 12), INT16_TO_MPS, INT16_TO_RPS))
    }

    // Modified version of poseToRegs to fix pose setting issue
    // see https://discord.com/channels/225450307654647808/1246977443030368349/1271702497659977760
    override fun poseToRegs(rawData: ByteArray, pose: Pose2D, xyToRaw: Double, hToRaw: Double) {
        // Convert pose units to raw data
        // NOTE(review): toInt() truncates toward zero rather than rounding — presumably
        // this is the behavior change relative to the upstream driver; confirm against
        // the linked discussion.
        val rawX = (_distanceUnit.toMeters(pose.x) * xyToRaw).toInt().toShort()
        val rawY = (_distanceUnit.toMeters(pose.y) * xyToRaw).toInt().toShort()
        val rawH = (_angularUnit.toRadians(pose.h) * hToRaw).toInt().toShort()

        // Store raw data in buffer, little-endian (low byte first) per int16 register
        rawData[0] = (rawX.toInt() and 0xFF).toByte()
        rawData[1] = ((rawX.toInt() shr 8) and 0xFF).toByte()
        rawData[2] = (rawY.toInt() and 0xFF).toByte()
        rawData[3] = ((rawY.toInt() shr 8) and 0xFF).toByte()
        rawData[4] = (rawH.toInt() and 0xFF).toByte()
        rawData[5] = ((rawH.toInt() shr 8) and 0xFF).toByte()
    }
}

View File

@ -0,0 +1,272 @@
package org.firstinspires.ftc.teamcode.pedroPathing.localization.localizers;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftFrontMotorName;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.leftRearMotorName;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightFrontMotorName;
import static org.firstinspires.ftc.teamcode.pedroPathing.tuning.FollowerConstants.rightRearMotorName;
import com.acmerobotics.dashboard.config.Config;
import com.qualcomm.robotcore.hardware.DcMotorEx;
import com.qualcomm.robotcore.hardware.HardwareMap;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Encoder;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Localizer;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Matrix;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.MathFunctions;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector;
import org.firstinspires.ftc.teamcode.pedroPathing.util.NanoTimer;
/**
* This is the DriveEncoderLocalizer class. This class extends the Localizer superclass and is a
* localizer that uses the drive encoder set up.
*
* @author Anyi Lin - 10158 Scott's Bots
* @version 1.0, 4/2/2024
*/
@Config
public class DriveEncoderLocalizer extends Localizer {
    private HardwareMap hardwareMap;
    private Pose startPose;
    private Pose displacementPose;
    private Pose currentVelocity;
    private Matrix prevRotationMatrix;
    private NanoTimer timer;
    private long deltaTimeNano;
    private Encoder leftFront;
    private Encoder rightFront;
    private Encoder leftRear;
    private Encoder rightRear;
    private double totalHeading;
    public static double FORWARD_TICKS_TO_INCHES = 1;
    public static double STRAFE_TICKS_TO_INCHES = 1;
    public static double TURN_TICKS_TO_RADIANS = 1;
    public static double ROBOT_WIDTH = 1;
    public static double ROBOT_LENGTH = 1;

    /**
     * This creates a new DriveEncoderLocalizer from a HardwareMap, with a starting Pose at (0,0)
     * facing 0 heading.
     *
     * @param map the HardwareMap
     */
    public DriveEncoderLocalizer(HardwareMap map) {
        this(map, new Pose());
    }

    /**
     * This creates a new DriveEncoderLocalizer from a HardwareMap and a Pose, with the Pose
     * specifying the starting pose of the localizer.
     *
     * @param map the HardwareMap
     * @param setStartPose the Pose to start from
     */
    public DriveEncoderLocalizer(HardwareMap map, Pose setStartPose) {
        hardwareMap = map;

        leftFront = new Encoder(hardwareMap.get(DcMotorEx.class, leftFrontMotorName));
        leftRear = new Encoder(hardwareMap.get(DcMotorEx.class, leftRearMotorName));
        rightRear = new Encoder(hardwareMap.get(DcMotorEx.class, rightRearMotorName));
        rightFront = new Encoder(hardwareMap.get(DcMotorEx.class, rightFrontMotorName));

        // TODO: reverse any encoders necessary
        leftFront.setDirection(Encoder.REVERSE);
        leftRear.setDirection(Encoder.REVERSE);
        rightFront.setDirection(Encoder.FORWARD);
        rightRear.setDirection(Encoder.FORWARD);

        setStartPose(setStartPose);
        timer = new NanoTimer();
        // Start at 1 nanosecond (not 0) so the first velocity division is finite.
        deltaTimeNano = 1;
        displacementPose = new Pose();
        currentVelocity = new Pose();
    }

    /**
     * This returns the current pose estimate.
     *
     * @return returns the current pose estimate as a Pose
     */
    @Override
    public Pose getPose() {
        return MathFunctions.addPoses(startPose, displacementPose);
    }

    /**
     * This returns the current velocity estimate.
     *
     * @return returns the current velocity estimate as a Pose
     */
    @Override
    public Pose getVelocity() {
        return currentVelocity.copy();
    }

    /**
     * This returns the current velocity estimate.
     *
     * @return returns the current velocity estimate as a Vector
     */
    @Override
    public Vector getVelocityVector() {
        return currentVelocity.getVector();
    }

    /**
     * This sets the start pose. Changing the start pose should move the robot as if all its
     * previous movements were displacing it from its new start pose.
     *
     * @param setStart the new start pose
     */
    @Override
    public void setStartPose(Pose setStart) {
        startPose = setStart;
    }

    /**
     * This sets the Matrix that contains the previous pose's heading rotation.
     *
     * @param heading the rotation of the Matrix
     */
    public void setPrevRotationMatrix(double heading) {
        prevRotationMatrix = new Matrix(3, 3);
        prevRotationMatrix.set(0, 0, Math.cos(heading));
        prevRotationMatrix.set(0, 1, -Math.sin(heading));
        prevRotationMatrix.set(1, 0, Math.sin(heading));
        prevRotationMatrix.set(1, 1, Math.cos(heading));
        prevRotationMatrix.set(2, 2, 1.0);
    }

    /**
     * This sets the current pose estimate. Changing this should just change the robot's current
     * pose estimate, not anything to do with the start pose.
     *
     * @param setPose the new current pose estimate
     */
    @Override
    public void setPose(Pose setPose) {
        displacementPose = MathFunctions.subtractPoses(setPose, startPose);
        resetEncoders();
    }

    /**
     * This updates the elapsed time timer that keeps track of time between updates, as well as the
     * change position of the Encoders. Then, the robot's global change in position is calculated
     * using the pose exponential method.
     */
    @Override
    public void update() {
        deltaTimeNano = timer.getElapsedTime();
        timer.resetTimer();

        updateEncoders();
        Matrix robotDeltas = getRobotDeltas();
        Matrix globalDeltas;
        setPrevRotationMatrix(getPose().getHeading());

        // Pose exponential: integrate the robot-relative twist into field frame.
        double thetaDelta = robotDeltas.get(2, 0);
        Matrix transformation = new Matrix(3, 3);
        if (Math.abs(thetaDelta) < 0.001) {
            // Small-angle Taylor expansion avoids dividing by a near-zero heading change.
            transformation.set(0, 0, 1.0 - (Math.pow(thetaDelta, 2) / 6.0));
            transformation.set(0, 1, -thetaDelta / 2.0);
            transformation.set(1, 0, thetaDelta / 2.0);
            transformation.set(1, 1, 1.0 - (Math.pow(thetaDelta, 2) / 6.0));
            transformation.set(2, 2, 1.0);
        } else {
            transformation.set(0, 0, Math.sin(thetaDelta) / thetaDelta);
            transformation.set(0, 1, (Math.cos(thetaDelta) - 1.0) / thetaDelta);
            transformation.set(1, 0, (1.0 - Math.cos(thetaDelta)) / thetaDelta);
            transformation.set(1, 1, Math.sin(thetaDelta) / thetaDelta);
            transformation.set(2, 2, 1.0);
        }

        globalDeltas = Matrix.multiply(Matrix.multiply(prevRotationMatrix, transformation), robotDeltas);

        displacementPose.add(new Pose(globalDeltas.get(0, 0), globalDeltas.get(1, 0), globalDeltas.get(2, 0)));
        // Nanoseconds -> seconds for velocity.
        double deltaTimeSeconds = deltaTimeNano / 1e9;
        currentVelocity = new Pose(globalDeltas.get(0, 0) / deltaTimeSeconds, globalDeltas.get(1, 0) / deltaTimeSeconds, globalDeltas.get(2, 0) / deltaTimeSeconds);

        totalHeading += globalDeltas.get(2, 0);
    }

    /**
     * This updates the Encoders.
     */
    public void updateEncoders() {
        leftFront.update();
        rightFront.update();
        leftRear.update();
        rightRear.update();
    }

    /**
     * This resets the Encoders.
     */
    public void resetEncoders() {
        leftFront.reset();
        rightFront.reset();
        leftRear.reset();
        rightRear.reset();
    }

    /**
     * This calculates the change in position from the perspective of the robot using information
     * from the Encoders.
     *
     * @return returns a Matrix containing the robot relative movement.
     */
    public Matrix getRobotDeltas() {
        Matrix returnMatrix = new Matrix(3, 1);
        // x/forward movement
        returnMatrix.set(0, 0, FORWARD_TICKS_TO_INCHES * (leftFront.getDeltaPosition() + rightFront.getDeltaPosition() + leftRear.getDeltaPosition() + rightRear.getDeltaPosition()));
        // y/strafe movement
        returnMatrix.set(1, 0, STRAFE_TICKS_TO_INCHES * (-leftFront.getDeltaPosition() + rightFront.getDeltaPosition() + leftRear.getDeltaPosition() - rightRear.getDeltaPosition()));
        // theta/turning
        returnMatrix.set(2, 0, TURN_TICKS_TO_RADIANS * ((-leftFront.getDeltaPosition() + rightFront.getDeltaPosition() - leftRear.getDeltaPosition() + rightRear.getDeltaPosition()) / (ROBOT_WIDTH + ROBOT_LENGTH)));
        return returnMatrix;
    }

    /**
     * This returns how far the robot has turned in radians, in a number not clamped between 0 and
     * 2 * pi radians. This is used for some tuning things and nothing actually within the following.
     *
     * @return returns how far the robot has turned in total, in radians.
     */
    @Override
    public double getTotalHeading() {
        return totalHeading;
    }

    /**
     * This returns the multiplier applied to forward movement measurement to convert from encoder
     * ticks to inches. This is found empirically through a tuner.
     *
     * @return returns the forward ticks to inches multiplier
     */
    @Override
    public double getForwardMultiplier() {
        return FORWARD_TICKS_TO_INCHES;
    }

    /**
     * This returns the multiplier applied to lateral/strafe movement measurement to convert from
     * encoder ticks to inches. This is found empirically through a tuner.
     *
     * @return returns the lateral/strafe ticks to inches multiplier
     */
    @Override
    public double getLateralMultiplier() {
        return STRAFE_TICKS_TO_INCHES;
    }

    /**
     * This returns the multiplier applied to turning movement measurement to convert from encoder
     * ticks to radians. This is found empirically through a tuner.
     *
     * @return returns the turning ticks to radians multiplier
     */
    @Override
    public double getTurningMultiplier() {
        return TURN_TICKS_TO_RADIANS;
    }

    /**
     * This does nothing since this localizer does not use the IMU.
     */
    @Override
    public void resetIMU() {
    }
}

View File

@ -0,0 +1,227 @@
package org.firstinspires.ftc.teamcode.pedroPathing.localization.localizers;
import com.qualcomm.hardware.sparkfun.SparkFunOTOS;
import com.qualcomm.robotcore.hardware.HardwareMap;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.robotcore.external.navigation.DistanceUnit;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Localizer;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.MathFunctions;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector;
/**
* This is the OTOSLocalizer class. This class extends the Localizer superclass and is a
* localizer that uses the SparkFun OTOS. The diagram below, which is modified from
* Road Runner, shows a typical set up.
*
* The view is from the top of the robot looking downwards.
*
* left on robot is the y positive direction
*
* forward on robot is the x positive direction
*
* forward (x positive)
* △
* |
* |
* /--------------\
* | |
* | |
* | || || |
* left (y positive) <--- | || || |
* | ____ |
* | ---- |
* \--------------/
*
* @author Anyi Lin - 10158 Scott's Bots
* @version 1.0, 7/20/2024
*/
public class OTOSLocalizer extends Localizer {
    private HardwareMap hardwareMap;
    private Pose startPose;
    private SparkFunOTOS otos;
    private SparkFunOTOS.Pose2D otosPose;
    private SparkFunOTOS.Pose2D otosVel;
    private SparkFunOTOS.Pose2D otosAcc;
    private double previousHeading;
    private double totalHeading;

    /**
     * This creates a new OTOSLocalizer from a HardwareMap, with a starting Pose at (0,0)
     * facing 0 heading.
     *
     * @param map the HardwareMap
     */
    public OTOSLocalizer(HardwareMap map) {
        this(map, new Pose());
    }

    /**
     * This creates a new OTOSLocalizer from a HardwareMap and a Pose, with the Pose
     * specifying the starting pose of the localizer.
     *
     * @param map the HardwareMap
     * @param setStartPose the Pose to start from
     */
    public OTOSLocalizer(HardwareMap map, Pose setStartPose) {
        hardwareMap = map;

        /*
        TODO: If you want to use the "SparkFunOTOSCorrected" version of OTOS, then replace the
         'SparkFunOTOS.class' below with 'SparkFunOTOSCorrected.class' and set the OTOS as a
         "SparkFunOTOS Corrected" in your robot config
         */
        // TODO: replace this with your OTOS port
        otos = hardwareMap.get(SparkFunOTOS.class, "sensor_otos");

        otos.setLinearUnit(DistanceUnit.INCH);
        otos.setAngularUnit(AngleUnit.RADIANS);

        // TODO: replace this with your OTOS offset from the center of the robot
        // For the OTOS, left/right is the y axis and forward/backward is the x axis, with left being
        // positive y and forward being positive x. PI/2 radians is facing forward, and clockwise
        // rotation is negative rotation.
        otos.setOffset(new SparkFunOTOS.Pose2D(0, 0, Math.PI / 2));

        // TODO: replace these with your tuned multipliers
        otos.setLinearScalar(1.0);
        otos.setAngularScalar(1.0);

        otos.calibrateImu();
        otos.resetTracking();

        setStartPose(setStartPose);
        otosPose = new SparkFunOTOS.Pose2D();
        otosVel = new SparkFunOTOS.Pose2D();
        otosAcc = new SparkFunOTOS.Pose2D();
        totalHeading = 0;
        previousHeading = startPose.getHeading();

        resetOTOS();
    }

    /**
     * This returns the current pose estimate.
     *
     * @return returns the current pose estimate as a Pose
     */
    @Override
    public Pose getPose() {
        // Rotate the OTOS-frame displacement by the start heading, then translate
        // by the start pose to express the pose in field coordinates.
        Pose pose = new Pose(otosPose.x, otosPose.y, otosPose.h);
        Vector vec = pose.getVector();
        vec.rotateVector(startPose.getHeading());
        return MathFunctions.addPoses(startPose, new Pose(vec.getXComponent(), vec.getYComponent(), pose.getHeading()));
    }

    /**
     * This returns the current velocity estimate.
     *
     * @return returns the current velocity estimate as a Pose
     */
    @Override
    public Pose getVelocity() {
        return new Pose(otosVel.x, otosVel.y, otosVel.h);
    }

    /**
     * This returns the current velocity estimate.
     *
     * @return returns the current velocity estimate as a Vector
     */
    @Override
    public Vector getVelocityVector() {
        return getVelocity().getVector();
    }

    /**
     * This sets the start pose. Changing the start pose should move the robot as if all its
     * previous movements were displacing it from its new start pose.
     *
     * @param setStart the new start pose
     */
    @Override
    public void setStartPose(Pose setStart) {
        startPose = setStart;
    }

    /**
     * This sets the current pose estimate. Changing this should just change the robot's current
     * pose estimate, not anything to do with the start pose.
     *
     * @param setPose the new current pose estimate
     */
    @Override
    public void setPose(Pose setPose) {
        resetOTOS();
        // The OTOS tracks relative to the start pose, so store the difference.
        Pose setOTOSPose = MathFunctions.subtractPoses(setPose, startPose);
        otos.setPosition(new SparkFunOTOS.Pose2D(setOTOSPose.getX(), setOTOSPose.getY(), setOTOSPose.getHeading()));
    }

    /**
     * This updates the total heading of the robot. The OTOS handles all other updates itself.
     */
    @Override
    public void update() {
        // Single burst read refreshes position, velocity, and acceleration together.
        otos.getPosVelAcc(otosPose, otosVel, otosAcc);
        totalHeading += MathFunctions.getSmallestAngleDifference(otosPose.h, previousHeading);
        previousHeading = otosPose.h;
    }

    /**
     * This resets the OTOS.
     */
    public void resetOTOS() {
        otos.resetTracking();
    }

    /**
     * This returns how far the robot has turned in radians, in a number not clamped between 0 and
     * 2 * pi radians. This is used for some tuning things and nothing actually within the following.
     *
     * @return returns how far the robot has turned in total, in radians.
     */
    @Override
    public double getTotalHeading() {
        return totalHeading;
    }

    /**
     * This returns the multiplier applied to forward movement measurement to convert from OTOS
     * ticks to inches. For the OTOS, this value is the same as the lateral multiplier.
     * This is found empirically through a tuner.
     *
     * @return returns the forward ticks to inches multiplier
     */
    @Override
    public double getForwardMultiplier() {
        return otos.getLinearScalar();
    }

    /**
     * This returns the multiplier applied to lateral/strafe movement measurement to convert from
     * OTOS ticks to inches. For the OTOS, this value is the same as the forward multiplier.
     * This is found empirically through a tuner.
     *
     * @return returns the lateral/strafe ticks to inches multiplier
     */
    @Override
    public double getLateralMultiplier() {
        return otos.getLinearScalar();
    }

    /**
     * This returns the multiplier applied to turning movement measurement to convert from OTOS ticks
     * to radians. This is found empirically through a tuner.
     *
     * @return returns the turning ticks to radians multiplier
     */
    @Override
    public double getTurningMultiplier() {
        return otos.getAngularScalar();
    }

    /**
     * This does nothing since this localizer does not use the IMU.
     */
    @Override
    public void resetIMU() {
    }
}

View File

@ -0,0 +1,245 @@
package org.firstinspires.ftc.teamcode.pedroPathing.localization.localizers;
import com.qualcomm.robotcore.hardware.HardwareMap;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.robotcore.external.navigation.DistanceUnit;
import org.firstinspires.ftc.robotcore.external.navigation.Pose2D;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.GoBildaPinpointDriver;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Localizer;
import org.firstinspires.ftc.teamcode.pedroPathing.localization.Pose;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.MathFunctions;
import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector;
import org.firstinspires.ftc.teamcode.pedroPathing.util.NanoTimer;
import org.opencv.core.Mat;
/**
* This is the Pinpoint class. This class extends the Localizer superclass and is a
* localizer that uses the two wheel odometry set up with the IMU to have more accurate heading
* readings. The diagram below, which is modified from Road Runner, shows a typical set up.
*
* The view is from the top of the robot looking downwards.
*
* left on robot is the y positive direction
*
* forward on robot is the x positive direction
*
* /--------------\
* | ____ |
* | ---- |
* | || |
* | || | ----> left (y positive)
* | |
* | |
* \--------------/
* |
* |
* V
* forward (x positive)
* With the pinpoint your readings will be used in mm
* to use inches ensure to divide your mm value by 25.4
* @author Logan Nash
* @author Havish Sripada 12808 - RevAmped Robotics
* @author Ethan Doak - Gobilda
* @version 1.0, 10/2/2024
*/
public class PinpointLocalizer extends Localizer {
private HardwareMap hardwareMap;
private GoBildaPinpointDriver odo;
private double previousHeading;
private double totalHeading;
private Pose startPose;
private long deltaTimeNano;
private NanoTimer timer;
private Pose currentVelocity;
private Pose previousPinpointPose;
/**
* This creates a new PinpointLocalizer from a HardwareMap, with a starting Pose at (0,0)
* facing 0 heading.
*
* @param map the HardwareMap
*/
public PinpointLocalizer(HardwareMap map){ this(map, new Pose());}
/**
* This creates a new PinpointLocalizer from a HardwareMap and a Pose, with the Pose
* specifying the starting pose of the localizer.
*
* @param map the HardwareMap
* @param setStartPose the Pose to start from
*/
public PinpointLocalizer(HardwareMap map, Pose setStartPose){
hardwareMap = map;
odo = hardwareMap.get(GoBildaPinpointDriver.class,"pinpoint");
//The default units are inches, but you can swap the units if you wish.
//If you have already tuned the TwoWheelLocalizer, you can simply use the forwardEncoderPose's y value and strafeEncoderPose's x values.
setOffsets(-2.815, 0.125, DistanceUnit.INCH); //these are tuned for 3110-0002-0001 Product Insight #1
//TODO: Tune urself if needed
// odo.setYawScalar(1.0);
odo.setEncoderResolution(GoBildaPinpointDriver.GoBildaOdometryPods.goBILDA_4_BAR_POD);
//odo.setEncoderResolution(13.26291192);
odo.setEncoderDirections(GoBildaPinpointDriver.EncoderDirection.REVERSED, GoBildaPinpointDriver.EncoderDirection.FORWARD);
resetPinpoint();
setStartPose(setStartPose);
totalHeading = 0;
timer = new NanoTimer();
previousPinpointPose = new Pose();
currentVelocity = new Pose();
deltaTimeNano = 1;
previousHeading = setStartPose.getHeading();
}
/**
* This returns the current pose estimate.
*
* @return returns the current pose estimate as a Pose
*/
@Override
public Pose getPose() {
return MathFunctions.addPoses(startPose, MathFunctions.rotatePose(previousPinpointPose, startPose.getHeading(), false));
}
/**
* This returns the current velocity estimate.
*
* @return returns the current velocity estimate as a Pose
*/
@Override
public Pose getVelocity() {
return currentVelocity.copy();
}
/**
* This returns the current velocity estimate.
*
* @return returns the current velocity estimate as a Vector
*/
@Override
public Vector getVelocityVector() {
return currentVelocity.getVector();
}
/**
* This sets the start pose. Since nobody should be using this after the robot has begun moving,
* and due to issues with the PinpointLocalizer, this is functionally the same as setPose(Pose).
*
* @param setStart the new start pose
*/
@Override
public void setStartPose(Pose setStart) {
this.startPose = setStart;
}
/**
* This sets the current pose estimate. Changing this should just change the robot's current
* pose estimate, not anything to do with the start pose.
*
* @param setPose the new current pose estimate
*/
@Override
public void setPose(Pose setPose) {
Pose setNewPose = MathFunctions.subtractPoses(setPose, startPose);
odo.setPosition(new Pose2D(DistanceUnit.INCH, setNewPose.getX(), setNewPose.getY(), AngleUnit.RADIANS, setNewPose.getHeading()));
}
/**
* This updates the total heading of the robot. The Pinpoint handles all other updates itself.
*/
@Override
public void update() {
deltaTimeNano = timer.getElapsedTime();
timer.resetTimer();
odo.update();
Pose2D pinpointPose = odo.getPosition();
Pose currentPinpointPose = new Pose(pinpointPose.getX(DistanceUnit.INCH), pinpointPose.getY(DistanceUnit.INCH), pinpointPose.getHeading(AngleUnit.RADIANS));
totalHeading += MathFunctions.getSmallestAngleDifference(currentPinpointPose.getHeading(), previousHeading);
previousHeading = currentPinpointPose.getHeading();
Pose deltaPose = MathFunctions.subtractPoses(currentPinpointPose, previousPinpointPose);
currentVelocity = new Pose(deltaPose.getX() / (deltaTimeNano / Math.pow(10.0, 9)), deltaPose.getY() / (deltaTimeNano / Math.pow(10.0, 9)), deltaPose.getHeading() / (deltaTimeNano / Math.pow(10.0, 9)));
previousPinpointPose = currentPinpointPose;
}
/**
* This returns how far the robot has turned in radians, in a number not clamped between 0 and
* 2 * pi radians. This is used for some tuning things and nothing actually within the following.
*
* @return returns how far the robot has turned in total, in radians.
*/
@Override
public double getTotalHeading() {
return totalHeading;
}
/**
* This returns the Y encoder value as none of the odometry tuners are required for this localizer
* @return returns the Y encoder value
*/
@Override
public double getForwardMultiplier() {
return odo.getEncoderY();
}
/**
* This returns the X encoder value as none of the odometry tuners are required for this localizer
* @return returns the X encoder value
*/
@Override
public double getLateralMultiplier() {
return odo.getEncoderX();
}
/**
* This returns either the factory tuned yaw scalar or the yaw scalar tuned by yourself.
* @return returns the yaw scalar
*/
@Override
public double getTurningMultiplier() {
return odo.getYawScalar();
}
/**
 * Passes the odometry-pod offsets to the Pinpoint, converting them to millimeters (the unit
 * the device expects) before handing them over.
 *
 * @param xOffset How far to the side from the center of the robot is the x-pod? Use positive values if it's to the left and negative if it's to the right.
 * @param yOffset How far forward from the center of the robot is the y-pod? Use positive values if it's forward and negative if it's to the back.
 * @param unit The units that the measurements are given in
 */
private void setOffsets(double xOffset, double yOffset, DistanceUnit unit) {
double xOffsetMm = unit.toMm(xOffset);
double yOffsetMm = unit.toMm(yOffset);
odo.setOffsets(xOffsetMm, yOffsetMm);
}
/**
 * This recalibrates the Pinpoint's IMU. Does not change the heading estimate.
 * Blocks for roughly 300 ms to give the recalibration time to complete.
 *
 * @throws InterruptedException declared for interface compatibility; in practice an interrupt
 *         during the calibration wait is re-flagged on the thread and surfaced as a
 *         RuntimeException instead.
 */
@Override
public void resetIMU() throws InterruptedException {
odo.recalibrateIMU();
try {
Thread.sleep(300);
} catch (InterruptedException e) {
// Restore the interrupt flag so code further up the stack can still observe the interrupt.
Thread.currentThread().interrupt();
throw new RuntimeException(e);
}
}
/**
 * This resets the Pinpoint's position estimate and recalibrates its IMU.
 * Blocks for roughly 300 ms to give the reset time to complete.
 */
private void resetPinpoint() {
odo.resetPosAndIMU();
try {
Thread.sleep(300);
} catch (InterruptedException e) {
// Restore the interrupt flag so code further up the stack can still observe the interrupt.
Thread.currentThread().interrupt();
throw new RuntimeException(e);
}
}
}

View File

@ -0,0 +1,159 @@
//package org.firstinspires.ftc.teamcode.pedroPathing.localization;
//
//import com.acmerobotics.roadrunner.geometry.Pose2d;
//import com.qualcomm.robotcore.hardware.HardwareMap;
//
//import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.MathFunctions;
//import org.firstinspires.ftc.teamcode.pedroPathing.pathGeneration.Vector;
//
//import java.util.ArrayList;
//import java.util.List;
//
///**
// * This is the RRToPedroThreeWheelLocalizer class. This class extends the Localizer superclass and
// * is intended to adapt the old Road Runner three wheel odometry localizer to the new Pedro Pathing
// * localizer system.
// *
// * @author Anyi Lin - 10158 Scott's Bots
// * @version 1.0, 5/9/2024
// */
//public class RRToPedroThreeWheelLocalizer extends Localizer {
// private RoadRunnerThreeWheelLocalizer localizer;
// private double totalHeading;
// private Pose startPose;
// private Pose previousPose;
//
// /**
// * This creates a new RRToPedroThreeWheelLocalizer from a HardwareMap. This adapts the previously
// * used Road Runner localization system to the new Pedro Pathing localization system.
// *
// * @param hardwareMap the HardwareMap
// */
// public RRToPedroThreeWheelLocalizer(HardwareMap hardwareMap) {
// List<Integer> lastTrackingEncPositions = new ArrayList<>();
// List<Integer> lastTrackingEncVels = new ArrayList<>();
//
// localizer = new RoadRunnerThreeWheelLocalizer(hardwareMap, lastTrackingEncPositions, lastTrackingEncVels);
//
// startPose = new Pose();
// previousPose = new Pose();
// }
//
// /**
// * This returns the current pose estimate as a Pose.
// *
// * @return returns the current pose estimate
// */
// @Override
// public Pose getPose() {
// Pose2d pose = localizer.getPoseEstimate();
// return new Pose(pose.getX(), pose.getY(), pose.getHeading());
// }
//
// /**
// * This returns the current velocity estimate as a Pose.
// *
// * @return returns the current velocity estimate
// */
// @Override
// public Pose getVelocity() {
// Pose2d pose = localizer.getPoseVelocity();
// return new Pose(pose.getX(), pose.getY(), pose.getHeading());
// }
//
// /**
// * This returns the current velocity estimate as a Vector.
// *
// * @return returns the current velocity estimate
// */
// @Override
// public Vector getVelocityVector() {
// Pose2d pose = localizer.getPoseVelocity();
// Vector returnVector = new Vector();
// returnVector.setOrthogonalComponents(pose.getX(), pose.getY());
// return returnVector;
// }
//
// /**
// * This sets the start pose. Any movement of the robot is treated as a displacement from the
// * start pose, so moving the start pose will move the current pose estimate the same amount.
// *
// * @param setStart the new start pose
// */
// @Override
// public void setStartPose(Pose setStart) {
// Pose oldStart = startPose;
// startPose = setStart;
// Pose startDiff = MathFunctions.subtractPoses(startPose, oldStart);
// localizer.setPoseEstimate(new Pose2d(getPose().getX() + startDiff.getX(), getPose().getY() + startDiff.getY(), getPose().getHeading() + startDiff.getHeading()));
// }
//
// /**
// * This sets the current pose estimate. This has no effect on the start pose.
// *
// * @param setPose the new current pose estimate
// */
// @Override
// public void setPose(Pose setPose) {
// localizer.setPoseEstimate(new Pose2d(setPose.getX(), setPose.getY(), setPose.getHeading()));
// }
//
// /**
// * This updates the total heading and previous pose estimate. Everything else is handled by the
// * Road Runner localizer on its own, but updating this tells you how far the robot has really
// * turned.
// */
// @Override
// public void update() {
// totalHeading += MathFunctions.getTurnDirection(previousPose.getHeading(), getPose().getHeading()) * MathFunctions.getSmallestAngleDifference(previousPose.getHeading(), getPose().getHeading());
// previousPose = getPose();
// }
//
// /**
// * This returns how far the robot has actually turned.
// *
// * @return returns the total angle turned, in radians.
// */
// @Override
// public double getTotalHeading() {
// return totalHeading;
// }
//
// /**
// * This returns the forward multiplier of the Road Runner localizer, which converts from ticks
// * to inches. You can actually use the tuners in Pedro Pathing to find the value that everything
// * multiplied together should be. If you do use that, then do be aware that the value returned is
// * the product of the Road Runner ticks to inches and the x multiplier.
// *
// * @return returns the forward multiplier
// */
// @Override
// public double getForwardMultiplier() {
// return RoadRunnerThreeWheelLocalizer.encoderTicksToInches(1) * RoadRunnerThreeWheelLocalizer.X_MULTIPLIER;
// }
//
// /**
// * This returns the lateral multiplier of the Road Runner localizer, which converts from ticks
// * to inches. You can actually use the tuners in Pedro Pathing to find the value that everything
// * multiplied together should be. If you do use that, then do be aware that the value returned is
// * the product of the Road Runner ticks to inches and the y multiplier.
// *
// * @return returns the lateral multiplier
// */
// @Override
// public double getLateralMultiplier() {
// return RoadRunnerThreeWheelLocalizer.encoderTicksToInches(1) * RoadRunnerThreeWheelLocalizer.Y_MULTIPLIER;
// }
//
// /**
// * This returns the turning multiplier of the Road Runner localizer, which doesn't actually exist.
// * There really isn't a point in tuning the turning for the Road Runner localizer. This will
// * actually just return the average of the two other multipliers.
// *
// * @return returns the turning multiplier
// */
// @Override
// public double getTurningMultiplier() {
// return (getForwardMultiplier() + getLateralMultiplier()) / 2;
// }
//}

View File

@ -0,0 +1,132 @@
//package org.firstinspires.ftc.teamcode.pedroPathing.localization;
//
//import com.acmerobotics.roadrunner.util.NanoClock;
//import com.qualcomm.robotcore.hardware.DcMotorEx;
//import com.qualcomm.robotcore.hardware.DcMotorSimple;
//
///**
// * This class is adapted from the Road Runner Encoder class. Later, this will be replaced with a
// * custom encoder class. According to Road Runner, this wraps a motor instance to provide corrected
// * velocity counts and allow reversing independently of the corresponding slot's motor direction.
// *
// * I'm fairly sure I didn't make any changes to this class, just copied it so I wouldn't have to have
// * import statements, so I'm not crediting myself as an author for this.
// *
// * @author Road Runner dev team
// * @version 1.0, 5/9/2024
// */
//public class RoadRunnerEncoder {
// private final static int CPS_STEP = 0x10000;
//
// private static double inverseOverflow(double input, double estimate) {
// // convert to uint16
// int real = (int) input & 0xffff;
// // initial, modulo-based correction: it can recover the remainder of 5 of the upper 16 bits
// // because the velocity is always a multiple of 20 cps due to Expansion Hub's 50ms measurement window
// real += ((real % 20) / 4) * CPS_STEP;
// // estimate-based correction: it finds the nearest multiple of 5 to correct the upper bits by
// real += Math.round((estimate - real) / (5 * CPS_STEP)) * 5 * CPS_STEP;
// return real;
// }
//
// public enum Direction {
// FORWARD(1),
// REVERSE(-1);
//
// private int multiplier;
//
// Direction(int multiplier) {
// this.multiplier = multiplier;
// }
//
// public int getMultiplier() {
// return multiplier;
// }
// }
//
// private DcMotorEx motor;
// private NanoClock clock;
//
// private Direction direction;
//
// private int lastPosition;
// private int velocityEstimateIdx;
// private double[] velocityEstimates;
// private double lastUpdateTime;
//
// public RoadRunnerEncoder(DcMotorEx motor, NanoClock clock) {
// this.motor = motor;
// this.clock = clock;
//
// this.direction = Direction.FORWARD;
//
// this.lastPosition = 0;
// this.velocityEstimates = new double[3];
// this.lastUpdateTime = clock.seconds();
// }
//
// public RoadRunnerEncoder(DcMotorEx motor) {
// this(motor, NanoClock.system());
// }
//
// public Direction getDirection() {
// return direction;
// }
//
// private int getMultiplier() {
// return getDirection().getMultiplier() * (motor.getDirection() == DcMotorSimple.Direction.FORWARD ? 1 : -1);
// }
//
// /**
// * Allows you to set the direction of the counts and velocity without modifying the motor's direction state
// * @param direction either reverse or forward depending on if encoder counts should be negated
// */
// public void setDirection(Direction direction) {
// this.direction = direction;
// }
//
// /**
// * Gets the position from the underlying motor and adjusts for the set direction.
// * Additionally, this method updates the velocity estimates used for compensated velocity
// *
// * @return encoder position
// */
// public int getCurrentPosition() {
// int multiplier = getMultiplier();
// int currentPosition = motor.getCurrentPosition() * multiplier;
// if (currentPosition != lastPosition) {
// double currentTime = clock.seconds();
// double dt = currentTime - lastUpdateTime;
// velocityEstimates[velocityEstimateIdx] = (currentPosition - lastPosition) / dt;
// velocityEstimateIdx = (velocityEstimateIdx + 1) % 3;
// lastPosition = currentPosition;
// lastUpdateTime = currentTime;
// }
// return currentPosition;
// }
//
// /**
// * Gets the velocity directly from the underlying motor and compensates for the direction
// * See {@link #getCorrectedVelocity} for high (>2^15) counts per second velocities (such as on REV Through Bore)
// *
// * @return raw velocity
// */
// public double getRawVelocity() {
// int multiplier = getMultiplier();
// return motor.getVelocity() * multiplier;
// }
//
// /**
// * Uses velocity estimates gathered in {@link #getCurrentPosition} to estimate the upper bits of velocity
// * that are lost in overflow due to velocity being transmitted as 16 bits.
// * CAVEAT: must regularly call {@link #getCurrentPosition} for the compensation to work correctly.
// *
// * @return corrected velocity
// */
// public double getCorrectedVelocity() {
// double median = velocityEstimates[0] > velocityEstimates[1]
// ? Math.max(velocityEstimates[1], Math.min(velocityEstimates[0], velocityEstimates[2]))
// : Math.max(velocityEstimates[0], Math.min(velocityEstimates[1], velocityEstimates[2]));
// return inverseOverflow(getRawVelocity(), median);
// }
//}

View File

@ -0,0 +1,123 @@
//package org.firstinspires.ftc.teamcode.pedroPathing.localization;
//
//import androidx.annotation.NonNull;
//
//import com.acmerobotics.dashboard.config.Config;
//import com.acmerobotics.roadrunner.geometry.Pose2d;
//import com.acmerobotics.roadrunner.localization.ThreeTrackingWheelLocalizer;
//import com.qualcomm.robotcore.hardware.DcMotorEx;
//import com.qualcomm.robotcore.hardware.HardwareMap;
//
//import java.util.Arrays;
//import java.util.List;
//
///*
// * Sample tracking wheel localizer implementation assuming the standard configuration:
// *
// * left on robot is y pos
// *
// * front of robot is x pos
// *
// * /--------------\
// * | ____ |
// * | ---- |
// * | || || |
// * | || || |
// * | |
// * | |
// * \--------------/
// *
// */
//
///**
// * This class is adapted from the Road Runner StandardTrackingWheelLocalizer class. Later, this will
// * be replaced with a custom localizer. I made some minor changes, so I'm crediting myself as an
// * 'author' of sorts, but really this is pretty much Road Runner's code, just moved to be local to
// * Pedro Pathing to avoid having imports.
// *
// * @author Road Runner dev team
// * @author Anyi Lin - 10158 Scott's Bots
// * @version 1.0, 5/9/2024
// */
//@Config
//public class RoadRunnerThreeWheelLocalizer extends ThreeTrackingWheelLocalizer {
// public static double TICKS_PER_REV = 8192;
// public static double WHEEL_RADIUS = 1.37795; // in
// public static double GEAR_RATIO = 1; // output (wheel) speed / input (encoder) speed
//
// public static double X_MULTIPLIER = 0.5008239963;
// public static double Y_MULTIPLIER = 0.5018874659;
//
// public static double leftX = -18.5/25.4 - 0.1, leftY = 164.4/25.4, rightX = -18.4/25.4 - 0.1, rightY = -159.6/25.4, strafeX = -107.9/25.4+0.25, strafeY = -1.1/25.4-0.23;
//
// private RoadRunnerEncoder leftEncoder, rightEncoder, strafeEncoder;
//
// private List<Integer> lastEncPositions, lastEncVels;
//
// public RoadRunnerThreeWheelLocalizer(HardwareMap hardwareMap, List<Integer> lastTrackingEncPositions, List<Integer> lastTrackingEncVels) {
// super(Arrays.asList(
// new Pose2d(leftX, leftY, 0), // left
// new Pose2d(rightX, rightY, 0), // right
// new Pose2d(strafeX, strafeY, Math.toRadians(90)) // strafe
// ));
//
// lastEncPositions = lastTrackingEncPositions;
// lastEncVels = lastTrackingEncVels;
//
// // TODO: redo the configs here
// leftEncoder = new RoadRunnerEncoder(hardwareMap.get(DcMotorEx.class, "leftRear"));
// rightEncoder = new RoadRunnerEncoder(hardwareMap.get(DcMotorEx.class, "rightFront"));
// strafeEncoder = new RoadRunnerEncoder(hardwareMap.get(DcMotorEx.class, "strafeEncoder"));
//
// // TODO: reverse any encoders using Encoder.setDirection(Encoder.Direction.REVERSE)
// leftEncoder.setDirection(RoadRunnerEncoder.Direction.REVERSE);
// rightEncoder.setDirection(RoadRunnerEncoder.Direction.REVERSE);
// strafeEncoder.setDirection(RoadRunnerEncoder.Direction.FORWARD);
// }
//
// public void resetHeading(double heading) {
// setPoseEstimate(new Pose2d(getPoseEstimate().getX(), getPoseEstimate().getY(), heading));
// }
//
// public static double encoderTicksToInches(double ticks) {
// return WHEEL_RADIUS * 2 * Math.PI * GEAR_RATIO * ticks / TICKS_PER_REV;
// }
//
// @NonNull
// @Override
// public List<Double> getWheelPositions() {
// int leftPos = leftEncoder.getCurrentPosition();
// int rightPos = rightEncoder.getCurrentPosition();
// int frontPos = strafeEncoder.getCurrentPosition();
//
// lastEncPositions.clear();
// lastEncPositions.add(leftPos);
// lastEncPositions.add(rightPos);
// lastEncPositions.add(frontPos);
//
// return Arrays.asList(
// encoderTicksToInches(leftPos) * X_MULTIPLIER,
// encoderTicksToInches(rightPos) * X_MULTIPLIER,
// encoderTicksToInches(frontPos) * Y_MULTIPLIER
// );
// }
//
// @NonNull
// @Override
// public List<Double> getWheelVelocities() {
// int leftVel = (int) leftEncoder.getCorrectedVelocity();
// int rightVel = (int) rightEncoder.getCorrectedVelocity();
// int frontVel = (int) strafeEncoder.getCorrectedVelocity();
//
// lastEncVels.clear();
// lastEncVels.add(leftVel);
// lastEncVels.add(rightVel);
// lastEncVels.add(frontVel);
//
// return Arrays.asList(
// encoderTicksToInches(leftVel) * X_MULTIPLIER,
// encoderTicksToInches(rightVel) * X_MULTIPLIER,
// encoderTicksToInches(frontVel) * Y_MULTIPLIER
// );
// }
//}

Some files were not shown because too many files have changed in this diff Show More