Compare commits

...

11 Commits

SHA1        Date                        Message
e0c362b883  2023-09-05 21:33:34 -07:00  Merge pull request #1 from furritos/fix-java-class (Remove errant dash from class name)
667af65a06  2023-09-05 17:41:25 -07:00  Remove errant dash from class name
c23c41891d  2023-08-29 20:00:14 -07:00  adding ChassisControl file for TeleOp (includes gamepad 1)
05e56926b2  2023-08-28 21:16:35 -07:00  Add Roadrunner
729ba8a969  2023-08-22 17:19:44 -07:00  Add Motor Test Code
660a2f63bc  2023-07-11 09:06:22 -07:00  Merge pull request #654 from FIRST-Tech-Challenge/20230707-131020-release-candidate (FtcRobotController v8.2)
5c8b4c448b  2023-07-07 15:15:11 -07:00  FtcRobotController v8.2
0879b4797f  2022-12-02 15:39:27 -08:00  Merge pull request #506 from FIRST-Tech-Challenge/20221201-150726-release-candidate (FtcRobotController v8.1.1)
3383440328  2022-12-02 15:13:28 -08:00  FtcRobotController v8.1.1
501c7c0e55  2022-11-21 16:24:24 -08:00  Merge pull request #488 from FIRST-Tech-Challenge/20221121-115119-release-candidate (FtcRobotController v8.1)
35d4aa7192  2022-11-21 14:53:43 -08:00  FtcRobotController v8.1
76 changed files with 7273 additions and 1652 deletions

CONTRIBUTING.md

@@ -18,7 +18,7 @@ If what you've read so far makes little sense, there are some very good git lear
[Git Book](https://git-scm.com/book/en/v2)
[Interactive Git Tutorial](https://try.github.io)
-##### Guidelines for experienced Git users.
+### Guidelines for experienced Git users.
If you are absolutely certain that you want to push the big green button above, read on. Otherwise back _slowly away from keyboard_.
@@ -38,9 +38,9 @@ This section guides you through filing a bug report. The better the report the
#### Before submitting a bug report
-- Check the [forums](http://ftcforum.usfirst.org/forum.php) to see if someone else has run into the problem and whether there is an official solution that doesn't require a new SDK.
+- Check the [forums](http://ftcforum.firstinspires.org/forum.php) to see if someone else has run into the problem and whether there is an official solution that doesn't require a new SDK.
-- Perform a search of current [issues](https://github.com/ftctechnh/ftc_app/issues) to see if the problem has already been reported. If so, add a comment to the existing issue instead of creating a new one.
+- Perform a search of current [issues](https://github.com/FIRST-Tech-Challenge/FtcRobotController/issues) to see if the problem has already been reported. If so, add a comment to the existing issue instead of creating a new one.
#### How Do I Submit A (Good) Bug Report?
@@ -65,4 +65,4 @@ FIRST volunteers are awesome. You all have great ideas and we want to hear them
Enhancements should be broadly applicable to a large majority of teams, should not force teams to change their workflow, and should provide real value to the mission of FIRST as it relates to engaging youth in engineering activities.
-The best way to get momentum behind new features is to post a description of your idea in the forums. Build community support for it. The FTC Technology Team monitors the forums. We'll hear you and if there's a large enough call for the feature it's very likely to get put on the list for a future release.
+The best way to get momentum behind new features is to post a description of your idea in the discussions section of this repository. Build community support for it. The FTC Technology Team monitors the discussions. We'll hear you and if there's a large enough call for the feature it's very likely to get put on the list for a future release.

.gitignore

@@ -9,8 +9,9 @@
# Files for the ART/Dalvik VM
*.dex
-# Java class files
+# Java/JDK files
*.class
+*.hprof
# Generated files
bin/
@@ -40,17 +41,10 @@ captures/
# IntelliJ
*.iml
-.idea/workspace.xml
-.idea/tasks.xml
-.idea/gradle.xml
-.idea/assetWizardSettings.xml
-.idea/dictionaries
-.idea/libraries
-# Android Studio 3 in .gitignore file.
-.idea/caches
-.idea/modules.xml
-# Comment next line if keeping position of elements in Navigation Editor is relevant for you
-.idea/navEditor.xml
+.idea/
# For Mac users
.DS_Store
# Keystore files
# Uncomment the following lines if you do not want to check your keystore files in.

FtcRobotController/build.gradle

@@ -8,7 +8,7 @@ apply plugin: 'com.android.library'
android {
defaultConfig {
-minSdkVersion 23
+minSdkVersion 24
//noinspection ExpiredTargetSdkVersion
targetSdkVersion 28
buildConfigField "String", "APP_BUILD_TIME", '"' + (new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ", Locale.ROOT).format(new Date())) + '"'
@@ -17,8 +17,8 @@ android {
compileSdkVersion 29
compileOptions {
-sourceCompatibility JavaVersion.VERSION_1_7
-targetCompatibility JavaVersion.VERSION_1_7
+sourceCompatibility JavaVersion.VERSION_1_8
+targetCompatibility JavaVersion.VERSION_1_8
}
namespace = 'com.qualcomm.ftcrobotcontroller'
}
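Raising sourceCompatibility and targetCompatibility from VERSION_1_7 to VERSION_1_8 lets code built in this project use Java 8 language features such as lambdas and method references. A minimal, self-contained sketch of syntax the new setting accepts (a hypothetical illustration, not part of this diff):

import java.util.Arrays;
import java.util.List;

public class Java8SyntaxDemo {
    public static void main(String[] args) {
        List<String> motorNames = Arrays.asList("left_drive", "right_drive");
        // A lambda like this fails to compile under VERSION_1_7.
        motorNames.forEach(name -> System.out.println("configuring " + name));
    }
}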

FtcRobotController/src/main/AndroidManifest.xml

@@ -1,8 +1,8 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:versionCode="47"
android:versionName="8.0">
android:versionCode="50"
android:versionName="8.2">
<uses-permission android:name="android.permission.RECEIVE_BOOT_COMPLETED" />

ConceptAprilTag.java (new file)

@@ -0,0 +1,192 @@
/* Copyright (c) 2023 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.robotcontroller.external.samples;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import java.util.List;
import android.util.Size;
import org.firstinspires.ftc.robotcore.external.hardware.camera.BuiltinCameraDirection;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.robotcore.external.navigation.DistanceUnit;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.vision.VisionPortal;
import org.firstinspires.ftc.vision.apriltag.AprilTagDetection;
import org.firstinspires.ftc.vision.apriltag.AprilTagProcessor;
import org.firstinspires.ftc.vision.apriltag.AprilTagGameDatabase;
/**
* This 2023-2024 OpMode illustrates the basics of AprilTag recognition and pose estimation,
* including Java Builder structures for specifying Vision parameters.
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
*/
@TeleOp(name = "Concept: AprilTag", group = "Concept")
@Disabled
public class ConceptAprilTag extends LinearOpMode {
private static final boolean USE_WEBCAM = true; // true for webcam, false for phone camera
/**
* {@link #aprilTag} is the variable to store our instance of the AprilTag processor.
*/
private AprilTagProcessor aprilTag;
/**
* {@link #visionPortal} is the variable to store our instance of the vision portal.
*/
private VisionPortal visionPortal;
@Override
public void runOpMode() {
initAprilTag();
// Wait for the DS start button to be touched.
telemetry.addData("DS preview on/off", "3 dots, Camera Stream");
telemetry.addData(">", "Touch Play to start OpMode");
telemetry.update();
waitForStart();
if (opModeIsActive()) {
while (opModeIsActive()) {
telemetryAprilTag();
// Push telemetry to the Driver Station.
telemetry.update();
// Save CPU resources; can resume streaming when needed.
if (gamepad1.dpad_down) {
visionPortal.stopStreaming();
} else if (gamepad1.dpad_up) {
visionPortal.resumeStreaming();
}
// Share the CPU.
sleep(20);
}
}
// Save more CPU resources when camera is no longer needed.
visionPortal.close();
} // end method runOpMode()
/**
* Initialize the AprilTag processor.
*/
private void initAprilTag() {
// Create the AprilTag processor.
aprilTag = new AprilTagProcessor.Builder()
//.setDrawAxes(false)
//.setDrawCubeProjection(false)
//.setDrawTagOutline(true)
//.setTagFamily(AprilTagProcessor.TagFamily.TAG_36h11)
//.setTagLibrary(AprilTagGameDatabase.getCenterStageTagLibrary())
//.setOutputUnits(DistanceUnit.INCH, AngleUnit.DEGREES)
// == CAMERA CALIBRATION ==
// If you do not manually specify calibration parameters, the SDK will attempt
// to load a predefined calibration for your camera.
//.setLensIntrinsics(578.272, 578.272, 402.145, 221.506)
// ... these parameters are fx, fy, cx, cy.
.build();
// Create the vision portal by using a builder.
VisionPortal.Builder builder = new VisionPortal.Builder();
// Set the camera (webcam vs. built-in RC phone camera).
if (USE_WEBCAM) {
builder.setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"));
} else {
builder.setCamera(BuiltinCameraDirection.BACK);
}
// Choose a camera resolution. Not all cameras support all resolutions.
//builder.setCameraResolution(new Size(640, 480));
// Enable the RC preview (LiveView). Set "false" to omit camera monitoring.
//builder.enableCameraMonitoring(true);
// Set the stream format; MJPEG uses less bandwidth than default YUY2.
//builder.setStreamFormat(VisionPortal.StreamFormat.YUY2);
// Choose whether or not LiveView stops if no processors are enabled.
// If set "true", monitor shows solid orange screen if no processors enabled.
// If set "false", monitor shows camera view without annotations.
//builder.setAutoStopLiveView(false);
// Set and enable the processor.
builder.addProcessor(aprilTag);
// Build the Vision Portal, using the above settings.
visionPortal = builder.build();
// Disable or re-enable the aprilTag processor at any time.
//visionPortal.setProcessorEnabled(aprilTag, true);
} // end method initAprilTag()
/**
* Function to add telemetry about AprilTag detections.
*/
private void telemetryAprilTag() {
List<AprilTagDetection> currentDetections = aprilTag.getDetections();
telemetry.addData("# AprilTags Detected", currentDetections.size());
// Step through the list of detections and display info for each one.
for (AprilTagDetection detection : currentDetections) {
if (detection.metadata != null) {
telemetry.addLine(String.format("\n==== (ID %d) %s", detection.id, detection.metadata.name));
telemetry.addLine(String.format("XYZ %6.1f %6.1f %6.1f (inch)", detection.ftcPose.x, detection.ftcPose.y, detection.ftcPose.z));
telemetry.addLine(String.format("PRY %6.1f %6.1f %6.1f (deg)", detection.ftcPose.pitch, detection.ftcPose.roll, detection.ftcPose.yaw));
telemetry.addLine(String.format("RBE %6.1f %6.1f %6.1f (inch, deg, deg)", detection.ftcPose.range, detection.ftcPose.bearing, detection.ftcPose.elevation));
} else {
telemetry.addLine(String.format("\n==== (ID %d) Unknown", detection.id));
telemetry.addLine(String.format("Center %6.0f %6.0f (pixels)", detection.center.x, detection.center.y));
}
} // end for() loop
// Add "key" information to telemetry
telemetry.addLine("\nkey:\nXYZ = X (Right), Y (Forward), Z (Up) dist.");
telemetry.addLine("PRY = Pitch, Roll & Yaw (XYZ Rotation)");
telemetry.addLine("RBE = Range, Bearing & Elevation");
} // end method telemetryAprilTag()
} // end class

ConceptAprilTagEasy.java (new file)

@@ -0,0 +1,148 @@
/* Copyright (c) 2023 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.robotcontroller.external.samples;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import java.util.List;
import org.firstinspires.ftc.robotcore.external.hardware.camera.BuiltinCameraDirection;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.vision.VisionPortal;
import org.firstinspires.ftc.vision.apriltag.AprilTagDetection;
import org.firstinspires.ftc.vision.apriltag.AprilTagProcessor;
/**
* This 2023-2024 OpMode illustrates the basics of AprilTag recognition and pose estimation, using
* the easy way.
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
*/
@TeleOp(name = "Concept: AprilTag Easy", group = "Concept")
@Disabled
public class ConceptAprilTagEasy extends LinearOpMode {
private static final boolean USE_WEBCAM = true; // true for webcam, false for phone camera
/**
* {@link #aprilTag} is the variable to store our instance of the AprilTag processor.
*/
private AprilTagProcessor aprilTag;
/**
* {@link #visionPortal} is the variable to store our instance of the vision portal.
*/
private VisionPortal visionPortal;
@Override
public void runOpMode() {
initAprilTag();
// Wait for the DS start button to be touched.
telemetry.addData("DS preview on/off", "3 dots, Camera Stream");
telemetry.addData(">", "Touch Play to start OpMode");
telemetry.update();
waitForStart();
if (opModeIsActive()) {
while (opModeIsActive()) {
telemetryAprilTag();
// Push telemetry to the Driver Station.
telemetry.update();
// Save CPU resources; can resume streaming when needed.
if (gamepad1.dpad_down) {
visionPortal.stopStreaming();
} else if (gamepad1.dpad_up) {
visionPortal.resumeStreaming();
}
// Share the CPU.
sleep(20);
}
}
// Save more CPU resources when camera is no longer needed.
visionPortal.close();
} // end method runOpMode()
/**
* Initialize the AprilTag processor.
*/
private void initAprilTag() {
// Create the AprilTag processor the easy way.
aprilTag = AprilTagProcessor.easyCreateWithDefaults();
// Create the vision portal the easy way.
if (USE_WEBCAM) {
visionPortal = VisionPortal.easyCreateWithDefaults(
hardwareMap.get(WebcamName.class, "Webcam 1"), aprilTag);
} else {
visionPortal = VisionPortal.easyCreateWithDefaults(
BuiltinCameraDirection.BACK, aprilTag);
}
} // end method initAprilTag()
/**
* Function to add telemetry about AprilTag detections.
*/
private void telemetryAprilTag() {
List<AprilTagDetection> currentDetections = aprilTag.getDetections();
telemetry.addData("# AprilTags Detected", currentDetections.size());
// Step through the list of detections and display info for each one.
for (AprilTagDetection detection : currentDetections) {
if (detection.metadata != null) {
telemetry.addLine(String.format("\n==== (ID %d) %s", detection.id, detection.metadata.name));
telemetry.addLine(String.format("XYZ %6.1f %6.1f %6.1f (inch)", detection.ftcPose.x, detection.ftcPose.y, detection.ftcPose.z));
telemetry.addLine(String.format("PRY %6.1f %6.1f %6.1f (deg)", detection.ftcPose.pitch, detection.ftcPose.roll, detection.ftcPose.yaw));
telemetry.addLine(String.format("RBE %6.1f %6.1f %6.1f (inch, deg, deg)", detection.ftcPose.range, detection.ftcPose.bearing, detection.ftcPose.elevation));
} else {
telemetry.addLine(String.format("\n==== (ID %d) Unknown", detection.id));
telemetry.addLine(String.format("Center %6.0f %6.0f (pixels)", detection.center.x, detection.center.y));
}
} // end for() loop
// Add "key" information to telemetry
telemetry.addLine("\nkey:\nXYZ = X (Right), Y (Forward), Z (Up) dist.");
telemetry.addLine("PRY = Pitch, Roll & Yaw (XYZ Rotation)");
telemetry.addLine("RBE = Range, Bearing & Elevation");
} // end method telemetryAprilTag()
} // end class

ConceptAprilTagOptimizeExposure.java (new file)

@@ -0,0 +1,248 @@
/* Copyright (c) 2023 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.robotcontroller.external.samples;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.util.Range;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.robotcore.external.hardware.camera.controls.ExposureControl;
import org.firstinspires.ftc.robotcore.external.hardware.camera.controls.GainControl;
import org.firstinspires.ftc.vision.VisionPortal;
import org.firstinspires.ftc.vision.apriltag.AprilTagDetection;
import org.firstinspires.ftc.vision.apriltag.AprilTagProcessor;
import java.util.List;
import java.util.concurrent.TimeUnit;
/**
* This OpMode determines the best Exposure for minimizing image motion-blur on a Webcam.
* Note that it is not possible to control the exposure of a Phone Camera, so if you are using a Phone for the Robot Controller,
* this OpMode/Feature only applies to an externally connected Webcam.
*
* The goal is to determine the smallest (shortest) Exposure value that still provides reliable Tag Detection.
* Starting with the minimum Exposure and maximum Gain, the exposure is slowly increased until the Tag is
* detected reliably from the likely operational distance.
*
*
* The best way to run this optimization is to view the camera preview screen while changing the exposure and gains.
*
* To do this, you need to view the RobotController screen directly (not from the Driver Station).
* This can be done directly from a RC phone screen (if you are using an external Webcam), but for a Control Hub you must either plug an
* HDMI monitor into the Control Hub HDMI port, or use an external viewer program like ScrCpy (https://scrcpy.org/)
*
* Use Android Studio to Copy this Class, and Paste it into the TeamCode/src/main/java/org/firstinspires/ftc/teamcode folder.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
*/
@TeleOp(name="Optimize AprilTag Exposure", group = "Concept")
@Disabled
public class ConceptAprilTagOptimizeExposure extends LinearOpMode
{
private VisionPortal visionPortal = null; // Used to manage the video source.
private AprilTagProcessor aprilTag; // Used for managing the AprilTag detection process.
private int myExposure ;
private int minExposure ;
private int maxExposure ;
private int myGain ;
private int minGain ;
private int maxGain ;
boolean thisExpUp = false;
boolean thisExpDn = false;
boolean thisGainUp = false;
boolean thisGainDn = false;
boolean lastExpUp = false;
boolean lastExpDn = false;
boolean lastGainUp = false;
boolean lastGainDn = false;
@Override public void runOpMode()
{
// Initialize the Apriltag Detection process
initAprilTag();
// Establish Min and Max Gains and Exposure. Then set a low exposure with high gain
getCameraSetting();
myExposure = Math.min(5, minExposure);
myGain = maxGain;
setManualExposure(myExposure, myGain);
// Wait for the match to begin.
telemetry.addData("Camera preview on/off", "3 dots, Camera Stream");
telemetry.addData(">", "Touch Play to start OpMode");
telemetry.update();
waitForStart();
while (opModeIsActive())
{
telemetry.addLine("Find lowest Exposure that gives reliable detection.");
telemetry.addLine("Use Left bump/trig to adjust Exposure.");
telemetry.addLine("Use Right bump/trig to adjust Gain.\n");
// Display how many Tags Detected
List<AprilTagDetection> currentDetections = aprilTag.getDetections();
int numTags = currentDetections.size();
if (numTags > 0 )
telemetry.addData("Tag", "####### %d Detected ######", currentDetections.size());
else
telemetry.addData("Tag", "----------- none - ----------");
telemetry.addData("Exposure","%d (%d - %d)", myExposure, minExposure, maxExposure);
telemetry.addData("Gain","%d (%d - %d)", myGain, minGain, maxGain);
telemetry.update();
// check to see if we need to change exposure or gain.
thisExpUp = gamepad1.left_bumper;
thisExpDn = gamepad1.left_trigger > 0.25;
thisGainUp = gamepad1.right_bumper;
thisGainDn = gamepad1.right_trigger > 0.25;
// look for clicks to change exposure
if (thisExpUp && !lastExpUp) {
myExposure = Range.clip(myExposure + 1, minExposure, maxExposure);
setManualExposure(myExposure, myGain);
} else if (thisExpDn && !lastExpDn) {
myExposure = Range.clip(myExposure - 1, minExposure, maxExposure);
setManualExposure(myExposure, myGain);
}
// look for clicks to change the gain
if (thisGainUp && !lastGainUp) {
myGain = Range.clip(myGain + 1, minGain, maxGain );
setManualExposure(myExposure, myGain);
} else if (thisGainDn && !lastGainDn) {
myGain = Range.clip(myGain - 1, minGain, maxGain );
setManualExposure(myExposure, myGain);
}
lastExpUp = thisExpUp;
lastExpDn = thisExpDn;
lastGainUp = thisGainUp;
lastGainDn = thisGainDn;
sleep(20);
}
}
/**
* Initialize the AprilTag processor.
*/
private void initAprilTag() {
// Create the AprilTag processor by using a builder.
aprilTag = new AprilTagProcessor.Builder().build();
// Create the WEBCAM vision portal by using a builder.
visionPortal = new VisionPortal.Builder()
.setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"))
.addProcessor(aprilTag)
.build();
}
/*
Manually set the camera gain and exposure.
Can only be called AFTER calling initAprilTag();
Returns true if controls are set.
*/
private boolean setManualExposure(int exposureMS, int gain) {
// Ensure Vision Portal has been setup.
if (visionPortal == null) {
return false;
}
// Wait for the camera to be open
if (visionPortal.getCameraState() != VisionPortal.CameraState.STREAMING) {
telemetry.addData("Camera", "Waiting");
telemetry.update();
while (!isStopRequested() && (visionPortal.getCameraState() != VisionPortal.CameraState.STREAMING)) {
sleep(20);
}
telemetry.addData("Camera", "Ready");
telemetry.update();
}
// Set camera controls unless we are stopping.
if (!isStopRequested())
{
// Set exposure. Make sure we are in Manual Mode for these values to take effect.
ExposureControl exposureControl = visionPortal.getCameraControl(ExposureControl.class);
if (exposureControl.getMode() != ExposureControl.Mode.Manual) {
exposureControl.setMode(ExposureControl.Mode.Manual);
sleep(50);
}
exposureControl.setExposure((long)exposureMS, TimeUnit.MILLISECONDS);
sleep(20);
// Set Gain.
GainControl gainControl = visionPortal.getCameraControl(GainControl.class);
gainControl.setGain(gain);
sleep(20);
return (true);
} else {
return (false);
}
}
/*
Read this camera's minimum and maximum Exposure and Gain settings.
Can only be called AFTER calling initAprilTag();
*/
private void getCameraSetting() {
// Ensure Vision Portal has been setup.
if (visionPortal == null) {
return;
}
// Wait for the camera to be open
if (visionPortal.getCameraState() != VisionPortal.CameraState.STREAMING) {
telemetry.addData("Camera", "Waiting");
telemetry.update();
while (!isStopRequested() && (visionPortal.getCameraState() != VisionPortal.CameraState.STREAMING)) {
sleep(20);
}
telemetry.addData("Camera", "Ready");
telemetry.update();
}
// Get camera control values unless we are stopping.
if (!isStopRequested()) {
ExposureControl exposureControl = visionPortal.getCameraControl(ExposureControl.class);
minExposure = (int)exposureControl.getMinExposure(TimeUnit.MILLISECONDS) + 1;
maxExposure = (int)exposureControl.getMaxExposure(TimeUnit.MILLISECONDS);
GainControl gainControl = visionPortal.getCameraControl(GainControl.class);
minGain = gainControl.getMinGain();
maxGain = gainControl.getMaxGain();
}
}
}
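Once this tool has identified workable values, a team would typically hard-code them in their competition OpMode rather than re-run the search. A minimal sketch using the same camera-control calls as setManualExposure() above (the exposure and gain numbers are placeholders, not recommendations):

// Hypothetical team-OpMode fragment: apply known-good values once the
// VisionPortal (built as in initAprilTag() above) is streaming.
ExposureControl exposureControl = visionPortal.getCameraControl(ExposureControl.class);
if (exposureControl.getMode() != ExposureControl.Mode.Manual) {
    exposureControl.setMode(ExposureControl.Mode.Manual);
    sleep(50);  // allow the mode change to take effect
}
exposureControl.setExposure(6, TimeUnit.MILLISECONDS);  // placeholder exposure
GainControl gainControl = visionPortal.getCameraControl(GainControl.class);
gainControl.setGain(250);                               // placeholder gain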

ConceptAprilTagSwitchableCameras.java (new file)

@@ -0,0 +1,195 @@
/* Copyright (c) 2023 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.robotcontroller.external.samples;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import java.util.List;
import org.firstinspires.ftc.robotcore.external.ClassFactory;
import org.firstinspires.ftc.robotcore.external.hardware.camera.CameraName;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.vision.VisionPortal;
import org.firstinspires.ftc.vision.VisionPortal.CameraState;
import org.firstinspires.ftc.vision.apriltag.AprilTagDetection;
import org.firstinspires.ftc.vision.apriltag.AprilTagProcessor;
/**
* This 2023-2024 OpMode illustrates the basics of AprilTag recognition and pose estimation, using
* two webcams.
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
*/
@TeleOp(name = "Concept: AprilTag Switchable Cameras", group = "Concept")
@Disabled
public class ConceptAprilTagSwitchableCameras extends LinearOpMode {
/**
* Variables used for switching cameras.
*/
private WebcamName webcam1, webcam2;
private boolean oldLeftBumper;
private boolean oldRightBumper;
/**
* {@link #aprilTag} is the variable to store our instance of the AprilTag processor.
*/
private AprilTagProcessor aprilTag;
/**
* {@link #visionPortal} is the variable to store our instance of the vision portal.
*/
private VisionPortal visionPortal;
@Override
public void runOpMode() {
initAprilTag();
// Wait for the DS start button to be touched.
telemetry.addData("DS preview on/off", "3 dots, Camera Stream");
telemetry.addData(">", "Touch Play to start OpMode");
telemetry.update();
waitForStart();
if (opModeIsActive()) {
while (opModeIsActive()) {
telemetryCameraSwitching();
telemetryAprilTag();
// Push telemetry to the Driver Station.
telemetry.update();
// Save CPU resources; can resume streaming when needed.
if (gamepad1.dpad_down) {
visionPortal.stopStreaming();
} else if (gamepad1.dpad_up) {
visionPortal.resumeStreaming();
}
doCameraSwitching();
// Share the CPU.
sleep(20);
}
}
// Save more CPU resources when camera is no longer needed.
visionPortal.close();
} // end runOpMode()
/**
* Initialize the AprilTag processor.
*/
private void initAprilTag() {
// Create the AprilTag processor by using a builder.
aprilTag = new AprilTagProcessor.Builder().build();
webcam1 = hardwareMap.get(WebcamName.class, "Webcam 1");
webcam2 = hardwareMap.get(WebcamName.class, "Webcam 2");
CameraName switchableCamera = ClassFactory.getInstance()
.getCameraManager().nameForSwitchableCamera(webcam1, webcam2);
// Create the vision portal by using a builder.
visionPortal = new VisionPortal.Builder()
.setCamera(switchableCamera)
.addProcessor(aprilTag)
.build();
} // end method initAprilTag()
/**
* Function to add telemetry about camera switching.
*/
private void telemetryCameraSwitching() {
if (visionPortal.getActiveCamera().equals(webcam1)) {
telemetry.addData("activeCamera", "Webcam 1");
telemetry.addData("Press RightBumper", "to switch to Webcam 2");
} else {
telemetry.addData("activeCamera", "Webcam 2");
telemetry.addData("Press LeftBumper", "to switch to Webcam 1");
}
} // end method telemetryCameraSwitching()
/**
* Function to add telemetry about AprilTag detections.
*/
private void telemetryAprilTag() {
List<AprilTagDetection> currentDetections = aprilTag.getDetections();
telemetry.addData("# AprilTags Detected", currentDetections.size());
// Step through the list of detections and display info for each one.
for (AprilTagDetection detection : currentDetections) {
if (detection.metadata != null) {
telemetry.addLine(String.format("\n==== (ID %d) %s", detection.id, detection.metadata.name));
telemetry.addLine(String.format("XYZ %6.1f %6.1f %6.1f (inch)", detection.ftcPose.x, detection.ftcPose.y, detection.ftcPose.z));
telemetry.addLine(String.format("PRY %6.1f %6.1f %6.1f (deg)", detection.ftcPose.pitch, detection.ftcPose.roll, detection.ftcPose.yaw));
telemetry.addLine(String.format("RBE %6.1f %6.1f %6.1f (inch, deg, deg)", detection.ftcPose.range, detection.ftcPose.bearing, detection.ftcPose.elevation));
} else {
telemetry.addLine(String.format("\n==== (ID %d) Unknown", detection.id));
telemetry.addLine(String.format("Center %6.0f %6.0f (pixels)", detection.center.x, detection.center.y));
}
} // end for() loop
// Add "key" information to telemetry
telemetry.addLine("\nkey:\nXYZ = X (Right), Y (Forward), Z (Up) dist.");
telemetry.addLine("PRY = Pitch, Roll & Yaw (XYZ Rotation)");
telemetry.addLine("RBE = Range, Bearing & Elevation");
} // end method telemetryAprilTag()
/**
* Function to set the active camera according to input from the gamepad.
*/
private void doCameraSwitching() {
if (visionPortal.getCameraState() == CameraState.STREAMING) {
// If the left bumper is pressed, use Webcam 1.
// If the right bumper is pressed, use Webcam 2.
boolean newLeftBumper = gamepad1.left_bumper;
boolean newRightBumper = gamepad1.right_bumper;
if (newLeftBumper && !oldLeftBumper) {
visionPortal.setActiveCamera(webcam1);
} else if (newRightBumper && !oldRightBumper) {
visionPortal.setActiveCamera(webcam2);
}
oldLeftBumper = newLeftBumper;
oldRightBumper = newRightBumper;
}
} // end method doCameraSwitching()
} // end class

ConceptDoubleVision.java (new file)

@@ -0,0 +1,201 @@
/* Copyright (c) 2023 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.robotcontroller.external.samples;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import java.util.List;
import org.firstinspires.ftc.robotcore.external.hardware.camera.BuiltinCameraDirection;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.robotcore.external.tfod.Recognition;
import org.firstinspires.ftc.vision.VisionPortal;
import org.firstinspires.ftc.vision.apriltag.AprilTagDetection;
import org.firstinspires.ftc.vision.apriltag.AprilTagProcessor;
import org.firstinspires.ftc.vision.tfod.TfodProcessor;
/**
* This 2023-2024 OpMode illustrates the basics of using both AprilTag recognition and TensorFlow
* Object Detection.
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
*/
@TeleOp(name = "Concept: Double Vision", group = "Concept")
@Disabled
public class ConceptDoubleVision extends LinearOpMode {
private static final boolean USE_WEBCAM = true; // true for webcam, false for phone camera
/**
* {@link #aprilTag} is the variable to store our instance of the AprilTag processor.
*/
private AprilTagProcessor aprilTag;
/**
* {@link #tfod} is the variable to store our instance of the TensorFlow Object Detection processor.
*/
private TfodProcessor tfod;
/**
* {@link #myVisionPortal} is the variable to store our instance of the vision portal.
*/
private VisionPortal myVisionPortal;
@Override
public void runOpMode() {
initDoubleVision();
// This OpMode loops continuously, allowing the user to switch between
// AprilTag and TensorFlow Object Detection (TFOD) image processors.
while (!isStopRequested()) {
if (opModeInInit()) {
telemetry.addData("DS preview on/off","3 dots, Camera Stream");
telemetry.addLine();
telemetry.addLine("----------------------------------------");
}
if (myVisionPortal.getProcessorEnabled(aprilTag)) {
// User instructions: Dpad left or Dpad right.
telemetry.addLine("Dpad Left to disable AprilTag");
telemetry.addLine();
telemetryAprilTag();
} else {
telemetry.addLine("Dpad Right to enable AprilTag");
}
telemetry.addLine();
telemetry.addLine("----------------------------------------");
if (myVisionPortal.getProcessorEnabled(tfod)) {
telemetry.addLine("Dpad Down to disable TFOD");
telemetry.addLine();
telemetryTfod();
} else {
telemetry.addLine("Dpad Up to enable TFOD");
}
// Push telemetry to the Driver Station.
telemetry.update();
if (gamepad1.dpad_left) {
myVisionPortal.setProcessorEnabled(aprilTag, false);
} else if (gamepad1.dpad_right) {
myVisionPortal.setProcessorEnabled(aprilTag, true);
}
if (gamepad1.dpad_down) {
myVisionPortal.setProcessorEnabled(tfod, false);
} else if (gamepad1.dpad_up) {
myVisionPortal.setProcessorEnabled(tfod, true);
}
sleep(20);
} // end while loop
} // end method runOpMode()
/**
* Function to initialize AprilTag and TFOD.
*/
private void initDoubleVision() {
// -----------------------------------------------------------------------------------------
// AprilTag Configuration
// -----------------------------------------------------------------------------------------
aprilTag = new AprilTagProcessor.Builder()
.build();
// -----------------------------------------------------------------------------------------
// TFOD Configuration
// -----------------------------------------------------------------------------------------
tfod = new TfodProcessor.Builder()
.build();
// -----------------------------------------------------------------------------------------
// Camera Configuration
// -----------------------------------------------------------------------------------------
if (USE_WEBCAM) {
myVisionPortal = new VisionPortal.Builder()
.setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"))
.addProcessors(tfod, aprilTag)
.build();
} else {
myVisionPortal = new VisionPortal.Builder()
.setCamera(BuiltinCameraDirection.BACK)
.addProcessors(tfod, aprilTag)
.build();
}
} // end initDoubleVision()
/**
* Function to add telemetry about AprilTag detections.
*/
private void telemetryAprilTag() {
List<AprilTagDetection> currentDetections = aprilTag.getDetections();
telemetry.addData("# AprilTags Detected", currentDetections.size());
// Step through the list of detections and display info for each one.
for (AprilTagDetection detection : currentDetections) {
if (detection.metadata != null) {
telemetry.addLine(String.format("\n==== (ID %d) %s", detection.id, detection.metadata.name));
telemetry.addLine(String.format("XYZ %6.1f %6.1f %6.1f (inch)", detection.ftcPose.x, detection.ftcPose.y, detection.ftcPose.z));
telemetry.addLine(String.format("PRY %6.1f %6.1f %6.1f (deg)", detection.ftcPose.pitch, detection.ftcPose.roll, detection.ftcPose.yaw));
telemetry.addLine(String.format("RBE %6.1f %6.1f %6.1f (inch, deg, deg)", detection.ftcPose.range, detection.ftcPose.bearing, detection.ftcPose.elevation));
} else {
telemetry.addLine(String.format("\n==== (ID %d) Unknown", detection.id));
telemetry.addLine(String.format("Center %6.0f %6.0f (pixels)", detection.center.x, detection.center.y));
}
} // end for() loop
} // end method telemetryAprilTag()
/**
* Function to add telemetry about TensorFlow Object Detection (TFOD) recognitions.
*/
private void telemetryTfod() {
List<Recognition> currentRecognitions = tfod.getRecognitions();
telemetry.addData("# Objects Detected", currentRecognitions.size());
// Step through the list of recognitions and display info for each one.
for (Recognition recognition : currentRecognitions) {
double x = (recognition.getLeft() + recognition.getRight()) / 2 ;
double y = (recognition.getTop() + recognition.getBottom()) / 2 ;
telemetry.addData(""," ");
telemetry.addData("Image", "%s (%.0f %% Conf.)", recognition.getLabel(), recognition.getConfidence() * 100);
telemetry.addData("- Position", "%.0f / %.0f", x, y);
telemetry.addData("- Size", "%.0f x %.0f", recognition.getWidth(), recognition.getHeight());
} // end for() loop
} // end method telemetryTfod()
} // end class

ConceptExploringIMUOrientation.java (new file)

@@ -0,0 +1,185 @@
/*
Copyright (c) 2022 REV Robotics, FIRST
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted (subject to the limitations in the disclaimer below) provided that
the following conditions are met:
Redistributions of source code must retain the above copyright notice, this list
of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.
Neither the name of REV Robotics nor the names of its contributors may be used to
endorse or promote products derived from this software without specific prior
written permission.
NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.robotcontroller.external.samples;
import com.qualcomm.hardware.rev.RevHubOrientationOnRobot;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.IMU;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.robotcore.external.navigation.AngularVelocity;
import org.firstinspires.ftc.robotcore.external.navigation.YawPitchRollAngles;
/**
* This file demonstrates the impact of setting the IMU orientation correctly or incorrectly. This
* code assumes there is an IMU configured with the name "imu".
* <p>
* Note: This OpMode is more of a tool than a code sample. The User Interface portion of this code
* goes beyond simply showing how to interface to the IMU.<br>
* For a minimal example of interfacing to an IMU, please see the SensorIMUOrthogonal or SensorIMUNonOrthogonal sample OpModes.
* <p>
* This sample enables you to re-specify the Hub Mounting orientation dynamically by using gamepad controls.
* While doing so, the sample will display how Pitch, Roll and Yaw angles change as the hub is moved.
* <p>
* The gamepad controls let you change the two parameters that specify how the Control/Expansion Hub is mounted. <br>
* The first parameter specifies which direction the printed logo on the Hub is pointing. <br>
* The second parameter specifies which direction the USB connector on the Hub is pointing. <br>
* All directions are relative to the robot, and left/right is as viewed from behind the robot.
* <p>
* How will you know if you have chosen the correct Orientation? With the correct orientation
* parameters selected, pitch/roll/yaw should act as follows:
* <p>
* Pitch value should INCREASE as the robot is tipped UP at the front. (Rotation about X) <br>
* Roll value should INCREASE as the robot is tipped UP at the left side. (Rotation about Y) <br>
* Yaw value should INCREASE as the robot is rotated Counter Clockwise. (Rotation about Z) <br>
* <p>
* The Yaw can be reset (to zero) by pressing the Y button on the gamepad (Triangle on a PS4 controller)
* <p>
* The rotational velocities should follow the change in corresponding axes.
*/
@TeleOp(name="Concept: IMU Orientation", group="Concept")
@Disabled
public class ConceptExploringIMUOrientation extends LinearOpMode {
static RevHubOrientationOnRobot.LogoFacingDirection[] logoFacingDirections
= RevHubOrientationOnRobot.LogoFacingDirection.values();
static RevHubOrientationOnRobot.UsbFacingDirection[] usbFacingDirections
= RevHubOrientationOnRobot.UsbFacingDirection.values();
static int LAST_DIRECTION = logoFacingDirections.length - 1;
static float TRIGGER_THRESHOLD = 0.2f;
IMU imu;
int logoFacingDirectionPosition;
int usbFacingDirectionPosition;
boolean orientationIsValid = true;
@Override public void runOpMode() throws InterruptedException {
imu = hardwareMap.get(IMU.class, "imu");
logoFacingDirectionPosition = 0; // Up
usbFacingDirectionPosition = 2; // Forward
updateOrientation();
boolean justChangedLogoDirection = false;
boolean justChangedUsbDirection = false;
// Loop until stop requested
while (!isStopRequested()) {
// Check to see if Yaw reset is requested (Y button)
if (gamepad1.y) {
telemetry.addData("Yaw", "Resetting\n");
imu.resetYaw();
} else {
telemetry.addData("Yaw", "Press Y (triangle) on Gamepad to reset.\n");
}
// Check to see if new Logo Direction is requested
if (gamepad1.left_bumper || gamepad1.right_bumper) {
if (!justChangedLogoDirection) {
justChangedLogoDirection = true;
if (gamepad1.left_bumper) {
logoFacingDirectionPosition--;
if (logoFacingDirectionPosition < 0) {
logoFacingDirectionPosition = LAST_DIRECTION;
}
} else {
logoFacingDirectionPosition++;
if (logoFacingDirectionPosition > LAST_DIRECTION) {
logoFacingDirectionPosition = 0;
}
}
updateOrientation();
}
} else {
justChangedLogoDirection = false;
}
// Check to see if new USB Direction is requested
if (gamepad1.left_trigger > TRIGGER_THRESHOLD || gamepad1.right_trigger > TRIGGER_THRESHOLD) {
if (!justChangedUsbDirection) {
justChangedUsbDirection = true;
if (gamepad1.left_trigger > TRIGGER_THRESHOLD) {
usbFacingDirectionPosition--;
if (usbFacingDirectionPosition < 0) {
usbFacingDirectionPosition = LAST_DIRECTION;
}
} else {
usbFacingDirectionPosition++;
if (usbFacingDirectionPosition > LAST_DIRECTION) {
usbFacingDirectionPosition = 0;
}
}
updateOrientation();
}
} else {
justChangedUsbDirection = false;
}
// Display User instructions and IMU data
telemetry.addData("logo Direction (set with bumpers)", logoFacingDirections[logoFacingDirectionPosition]);
telemetry.addData("usb Direction (set with triggers)", usbFacingDirections[usbFacingDirectionPosition] + "\n");
if (orientationIsValid) {
YawPitchRollAngles orientation = imu.getRobotYawPitchRollAngles();
AngularVelocity angularVelocity = imu.getRobotAngularVelocity(AngleUnit.DEGREES);
telemetry.addData("Yaw (Z)", "%.2f Deg. (Heading)", orientation.getYaw(AngleUnit.DEGREES));
telemetry.addData("Pitch (X)", "%.2f Deg.", orientation.getPitch(AngleUnit.DEGREES));
telemetry.addData("Roll (Y)", "%.2f Deg.\n", orientation.getRoll(AngleUnit.DEGREES));
telemetry.addData("Yaw (Z) velocity", "%.2f Deg/Sec", angularVelocity.zRotationRate);
telemetry.addData("Pitch (X) velocity", "%.2f Deg/Sec", angularVelocity.xRotationRate);
telemetry.addData("Roll (Y) velocity", "%.2f Deg/Sec", angularVelocity.yRotationRate);
} else {
telemetry.addData("Error", "Selected orientation on robot is invalid");
}
telemetry.update();
}
}
// apply any requested orientation changes.
void updateOrientation() {
RevHubOrientationOnRobot.LogoFacingDirection logo = logoFacingDirections[logoFacingDirectionPosition];
RevHubOrientationOnRobot.UsbFacingDirection usb = usbFacingDirections[usbFacingDirectionPosition];
try {
RevHubOrientationOnRobot orientationOnRobot = new RevHubOrientationOnRobot(logo, usb);
imu.initialize(new IMU.Parameters(orientationOnRobot));
orientationIsValid = true;
} catch (IllegalArgumentException e) {
orientationIsValid = false;
}
}
}
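Once the two mounting directions are known, a team's OpMode does not need this interactive tool; it can initialize the IMU with fixed parameters. A minimal sketch using the same API (LogoFacingDirection.UP and UsbFacingDirection.FORWARD are example values; substitute your hub's actual mounting):

// Hypothetical team-OpMode fragment for a known hub orientation.
IMU imu = hardwareMap.get(IMU.class, "imu");
imu.initialize(new IMU.Parameters(new RevHubOrientationOnRobot(
        RevHubOrientationOnRobot.LogoFacingDirection.UP,         // example mounting
        RevHubOrientationOnRobot.UsbFacingDirection.FORWARD)));  // example mounting
imu.resetYaw();  // zero the heading while the robot faces a known direction
double headingDeg = imu.getRobotYawPitchRollAngles().getYaw(AngleUnit.DEGREES);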

ConceptTensorFlowObjectDetection.java

@@ -33,157 +33,149 @@ import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import java.util.List;
-import org.firstinspires.ftc.robotcore.external.ClassFactory;
-import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer;
-import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer.CameraDirection;
-import org.firstinspires.ftc.robotcore.external.tfod.TFObjectDetector;
+import org.firstinspires.ftc.robotcore.external.hardware.camera.BuiltinCameraDirection;
+import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.robotcore.external.tfod.Recognition;
+import org.firstinspires.ftc.vision.VisionPortal;
+import org.firstinspires.ftc.vision.tfod.TfodProcessor;
/**
-* This 2022-2023 OpMode illustrates the basics of using the TensorFlow Object Detection API to
-* determine which image is being presented to the robot.
+* This 2023-2024 OpMode illustrates the basics of TensorFlow Object Detection,
+* including Java Builder structures for specifying Vision parameters.
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
*
-* IMPORTANT: In order to use this OpMode, you need to obtain your own Vuforia license key as
-* is explained below.
*/
@TeleOp(name = "Concept: TensorFlow Object Detection", group = "Concept")
@Disabled
public class ConceptTensorFlowObjectDetection extends LinearOpMode {
-/*
-* Specify the source for the Tensor Flow Model.
-* If the TensorFlowLite object model is included in the Robot Controller App as an "asset",
-* the OpMode must to load it using loadModelFromAsset(). However, if a team generated model
-* has been downloaded to the Robot Controller's SD FLASH memory, it must to be loaded using loadModelFromFile()
-* Here we assume it's an Asset. Also see method initTfod() below .
-*/
-private static final String TFOD_MODEL_ASSET = "PowerPlay.tflite";
-// private static final String TFOD_MODEL_FILE = "/sdcard/FIRST/tflitemodels/CustomTeamModel.tflite";
-private static final String[] LABELS = {
-"1 Bolt",
-"2 Bulb",
-"3 Panel"
-};
-/*
-* IMPORTANT: You need to obtain your own license key to use Vuforia. The string below with which
-* 'parameters.vuforiaLicenseKey' is initialized is for illustration only, and will not function.
-* A Vuforia 'Development' license key, can be obtained free of charge from the Vuforia developer
-* web site at https://developer.vuforia.com/license-manager.
-*
-* Vuforia license keys are always 380 characters long, and look as if they contain mostly
-* random data. As an example, here is a example of a fragment of a valid key:
-* ... yIgIzTqZ4mWjk9wd3cZO9T1axEqzuhxoGlfOOI2dRzKS4T0hQ8kT ...
-* Once you've obtained a license key, copy the string from the Vuforia web site
-* and paste it in to your code on the next line, between the double quotes.
-*/
-private static final String VUFORIA_KEY =
-" -- YOUR NEW VUFORIA KEY GOES HERE --- ";
+private static final boolean USE_WEBCAM = true; // true for webcam, false for phone camera
/**
-* {@link #vuforia} is the variable we will use to store our instance of the Vuforia
-* localization engine.
+* {@link #tfod} is the variable to store our instance of the TensorFlow Object Detection processor.
*/
-private VuforiaLocalizer vuforia;
+private TfodProcessor tfod;
/**
-* {@link #tfod} is the variable we will use to store our instance of the TensorFlow Object
-* Detection engine.
+* {@link #visionPortal} is the variable to store our instance of the vision portal.
*/
-private TFObjectDetector tfod;
+private VisionPortal visionPortal;
@Override
public void runOpMode() {
-// The TFObjectDetector uses the camera frames from the VuforiaLocalizer, so we create that
-// first.
-initVuforia();
initTfod();
-/**
-* Activate TensorFlow Object Detection before we wait for the start command.
-* Do it here so that the Camera Stream window will have the TensorFlow annotations visible.
-**/
-if (tfod != null) {
-tfod.activate();
-// The TensorFlow software will scale the input images from the camera to a lower resolution.
-// This can result in lower detection accuracy at longer distances (> 55cm or 22").
-// If your target is at distance greater than 50 cm (20") you can increase the magnification value
-// to artificially zoom in to the center of image. For best results, the "aspectRatio" argument
-// should be set to the value of the images used to create the TensorFlow Object Detection model
-// (typically 16/9).
-tfod.setZoom(1.0, 16.0/9.0);
-}
-/** Wait for the game to begin */
-telemetry.addData(">", "Press Play to start op mode");
+// Wait for the DS start button to be touched.
+telemetry.addData("DS preview on/off", "3 dots, Camera Stream");
+telemetry.addData(">", "Touch Play to start OpMode");
telemetry.update();
waitForStart();
if (opModeIsActive()) {
while (opModeIsActive()) {
-if (tfod != null) {
-// getUpdatedRecognitions() will return null if no new information is available since
-// the last time that call was made.
-List<Recognition> updatedRecognitions = tfod.getUpdatedRecognitions();
-if (updatedRecognitions != null) {
-telemetry.addData("# Objects Detected", updatedRecognitions.size());
-// step through the list of recognitions and display image position/size information for each one
-// Note: "Image number" refers to the randomized image orientation/number
-for (Recognition recognition : updatedRecognitions) {
-double col = (recognition.getLeft() + recognition.getRight()) / 2 ;
-double row = (recognition.getTop() + recognition.getBottom()) / 2 ;
-double width = Math.abs(recognition.getRight() - recognition.getLeft()) ;
-double height = Math.abs(recognition.getTop() - recognition.getBottom()) ;
+telemetryTfod();
-telemetry.addData(""," ");
-telemetry.addData("Image", "%s (%.0f %% Conf.)", recognition.getLabel(), recognition.getConfidence() * 100 );
-telemetry.addData("- Position (Row/Col)","%.0f / %.0f", row, col);
-telemetry.addData("- Size (Width/Height)","%.0f / %.0f", width, height);
-}
-telemetry.update();
-}
+// Push telemetry to the Driver Station.
+telemetry.update();
+// Save CPU resources; can resume streaming when needed.
+if (gamepad1.dpad_down) {
+visionPortal.stopStreaming();
+} else if (gamepad1.dpad_up) {
+visionPortal.resumeStreaming();
+}
+// Share the CPU.
+sleep(20);
}
}
-}
+// Save more CPU resources when camera is no longer needed.
+visionPortal.close();
+} // end runOpMode()
-/**
-* Initialize the Vuforia localization engine.
-*/
-private void initVuforia() {
-/*
-* Configure Vuforia by creating a Parameter object, and passing it to the Vuforia engine.
-*/
-VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters();
-parameters.vuforiaLicenseKey = VUFORIA_KEY;
-parameters.cameraDirection = CameraDirection.BACK;
-// Instantiate the Vuforia engine
-vuforia = ClassFactory.getInstance().createVuforia(parameters);
-}
/**
* Initialize the TensorFlow Object Detection engine.
* Initialize the TensorFlow Object Detection processor.
*/
private void initTfod() {
int tfodMonitorViewId = hardwareMap.appContext.getResources().getIdentifier(
"tfodMonitorViewId", "id", hardwareMap.appContext.getPackageName());
TFObjectDetector.Parameters tfodParameters = new TFObjectDetector.Parameters(tfodMonitorViewId);
tfodParameters.minResultConfidence = 0.75f;
tfodParameters.isModelTensorFlow2 = true;
tfodParameters.inputSize = 300;
tfod = ClassFactory.getInstance().createTFObjectDetector(tfodParameters, vuforia);
// Use loadModelFromAsset() if the TF Model is built in as an asset by Android Studio
// Use loadModelFromFile() if you have downloaded a custom team model to the Robot Controller's FLASH.
tfod.loadModelFromAsset(TFOD_MODEL_ASSET, LABELS);
// tfod.loadModelFromFile(TFOD_MODEL_FILE, LABELS);
}
}
// Create the TensorFlow processor by using a builder.
tfod = new TfodProcessor.Builder()
// Use setModelAssetName() if the TF Model is built in as an asset.
// Use setModelFileName() if you have downloaded a custom team model to the Robot Controller.
//.setModelAssetName(TFOD_MODEL_ASSET)
//.setModelFileName(TFOD_MODEL_FILE)
//.setModelLabels(LABELS)
//.setIsModelTensorFlow2(true)
//.setIsModelQuantized(true)
//.setModelInputSize(300)
//.setModelAspectRatio(16.0 / 9.0)
.build();
// Create the vision portal by using a builder.
VisionPortal.Builder builder = new VisionPortal.Builder();
// Set the camera (webcam vs. built-in RC phone camera).
if (USE_WEBCAM) {
builder.setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"));
} else {
builder.setCamera(BuiltinCameraDirection.BACK);
}
// Choose a camera resolution. Not all cameras support all resolutions.
//builder.setCameraResolution(new Size(640, 480));
// Enable the RC preview (LiveView). Set "false" to omit camera monitoring.
//builder.enableCameraMonitoring(true);
// Set the stream format; MJPEG uses less bandwidth than default YUY2.
//builder.setStreamFormat(VisionPortal.StreamFormat.YUY2);
// Choose whether or not LiveView stops if no processors are enabled.
// If set "true", monitor shows solid orange screen if no processors enabled.
// If set "false", monitor shows camera view without annotations.
//builder.setAutoStopLiveView(false);
// Set and enable the processor.
builder.addProcessor(tfod);
// Build the Vision Portal, using the above settings.
visionPortal = builder.build();
// Set confidence threshold for TFOD recognitions, at any time.
//tfod.setMinResultConfidence(0.75f);
// Disable or re-enable the TFOD processor at any time.
//visionPortal.setProcessorEnabled(tfod, true);
} // end method initTfod()
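// As an illustrative sketch (not part of the sample flow above), a team that has
// downloaded a custom model to the Robot Controller might configure the processor
// like this; the file name and label below are placeholders:
//
//   tfod = new TfodProcessor.Builder()
//       .setModelFileName("/sdcard/FIRST/tflitemodels/CustomTeamModel.tflite")
//       .setModelLabels(new String[] {"MyGameElement"})
//       .build();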
/**
* Function to add telemetry about TensorFlow Object Detection (TFOD) recognitions.
*/
private void telemetryTfod() {
List<Recognition> currentRecognitions = tfod.getRecognitions();
telemetry.addData("# Objects Detected", currentRecognitions.size());
// Step through the list of recognitions and display info for each one.
for (Recognition recognition : currentRecognitions) {
double x = (recognition.getLeft() + recognition.getRight()) / 2 ;
double y = (recognition.getTop() + recognition.getBottom()) / 2 ;
telemetry.addData(""," ");
telemetry.addData("Image", "%s (%.0f %% Conf.)", recognition.getLabel(), recognition.getConfidence() * 100);
telemetry.addData("- Position", "%.0f / %.0f", x, y);
telemetry.addData("- Size", "%.0f x %.0f", recognition.getWidth(), recognition.getHeight());
} // end for() loop
} // end method telemetryTfod()
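// As an example of acting on the detections (an illustrative sketch, not part of
// this sample), an OpMode could keep only the most confident recognition:
//
//   Recognition best = null;
//   for (Recognition r : tfod.getRecognitions()) {
//       if (best == null || r.getConfidence() > best.getConfidence()) {
//           best = r;
//       }
//   }
//   if (best != null) {
//       // e.g. steer toward the horizontal center of "best".
//   }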
} // end class

View File

@ -0,0 +1,141 @@
/* Copyright (c) 2019 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.robotcontroller.external.samples;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import java.util.List;
import org.firstinspires.ftc.robotcore.external.hardware.camera.BuiltinCameraDirection;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.robotcore.external.tfod.Recognition;
import org.firstinspires.ftc.vision.VisionPortal;
import org.firstinspires.ftc.vision.tfod.TfodProcessor;
/**
* This 2023-2024 OpMode illustrates the basics of TensorFlow Object Detection,
* using the easiest, all-defaults approach.
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
*/
@TeleOp(name = "Concept: TensorFlow Object Detection Easy", group = "Concept")
@Disabled
public class ConceptTensorFlowObjectDetectionEasy extends LinearOpMode {
private static final boolean USE_WEBCAM = true; // true for webcam, false for phone camera
/**
* {@link #tfod} is the variable to store our instance of the TensorFlow Object Detection processor.
*/
private TfodProcessor tfod;
/**
* {@link #visionPortal} is the variable to store our instance of the vision portal.
*/
private VisionPortal visionPortal;
@Override
public void runOpMode() {
initTfod();
// Wait for the DS start button to be touched.
telemetry.addData("DS preview on/off", "3 dots, Camera Stream");
telemetry.addData(">", "Touch Play to start OpMode");
telemetry.update();
waitForStart();
if (opModeIsActive()) {
while (opModeIsActive()) {
telemetryTfod();
// Push telemetry to the Driver Station.
telemetry.update();
// Save CPU resources; can resume streaming when needed.
if (gamepad1.dpad_down) {
visionPortal.stopStreaming();
} else if (gamepad1.dpad_up) {
visionPortal.resumeStreaming();
}
// Share the CPU.
sleep(20);
}
}
// Save more CPU resources when camera is no longer needed.
visionPortal.close();
} // end runOpMode()
/**
* Initialize the TensorFlow Object Detection processor.
*/
private void initTfod() {
// Create the TensorFlow processor the easy way.
tfod = TfodProcessor.easyCreateWithDefaults();
// Create the vision portal the easy way.
if (USE_WEBCAM) {
visionPortal = VisionPortal.easyCreateWithDefaults(
hardwareMap.get(WebcamName.class, "Webcam 1"), tfod);
} else {
visionPortal = VisionPortal.easyCreateWithDefaults(
BuiltinCameraDirection.BACK, tfod);
}
} // end method initTfod()
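// The "easy create" methods above accept all defaults. If a team needed, say, a
// custom model, the builder pattern from the full sample applies instead; a
// minimal sketch (the asset name is a placeholder):
//
//   tfod = new TfodProcessor.Builder().setModelAssetName("MyModel.tflite").build();
//   visionPortal = new VisionPortal.Builder()
//       .setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"))
//       .addProcessor(tfod)
//       .build();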
/**
* Function to add telemetry about TensorFlow Object Detection (TFOD) recognitions.
*/
private void telemetryTfod() {
List<Recognition> currentRecognitions = tfod.getRecognitions();
telemetry.addData("# Objects Detected", currentRecognitions.size());
// Step through the list of recognitions and display info for each one.
for (Recognition recognition : currentRecognitions) {
double x = (recognition.getLeft() + recognition.getRight()) / 2 ;
double y = (recognition.getTop() + recognition.getBottom()) / 2 ;
telemetry.addData(""," ");
telemetry.addData("Image", "%s (%.0f %% Conf.)", recognition.getLabel(), recognition.getConfidence() * 100);
telemetry.addData("- Position", "%.0f / %.0f", x, y);
telemetry.addData("- Size", "%.0f x %.0f", recognition.getWidth(), recognition.getHeight());
} // end for() loop
} // end method telemetryTfod()
} // end class

View File

@ -34,191 +34,152 @@ import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import java.util.List;
import org.firstinspires.ftc.robotcore.external.ClassFactory;
import org.firstinspires.ftc.robotcore.external.hardware.camera.CameraName;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.robotcore.external.tfod.Recognition;
import org.firstinspires.ftc.vision.VisionPortal;
import org.firstinspires.ftc.vision.VisionPortal.CameraState;
import org.firstinspires.ftc.vision.tfod.TfodProcessor;
/**
* This 2023-2024 OpMode illustrates the basics of TensorFlow Object Detection, using
* two webcams.
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
*/
@TeleOp(name = "Concept: TensorFlow Object Detection Switchable Cameras", group = "Concept")
@Disabled
public class ConceptTensorFlowObjectDetectionSwitchableCameras extends LinearOpMode {
/**
* Variables used for switching cameras.
*/
private WebcamName webcam1, webcam2;
private boolean oldLeftBumper;
private boolean oldRightBumper;
/**
* {@link #tfod} is the variable to store our instance of the TensorFlow Object Detection processor.
*/
private TfodProcessor tfod;
/**
* {@link #visionPortal} is the variable to store our instance of the vision portal.
*/
private VisionPortal visionPortal;
@Override
public void runOpMode() {
initTfod();
// Wait for the DS start button to be touched.
telemetry.addData("DS preview on/off", "3 dots, Camera Stream");
telemetry.addData(">", "Touch Play to start OpMode");
telemetry.update();
waitForStart();
if (opModeIsActive()) {
while (opModeIsActive()) {
telemetryCameraSwitching();
telemetryTfod();
// Push telemetry to the Driver Station.
telemetry.update();
// Save CPU resources; can resume streaming when needed.
if (gamepad1.dpad_down) {
visionPortal.stopStreaming();
} else if (gamepad1.dpad_up) {
visionPortal.resumeStreaming();
}
doCameraSwitching();
// Share the CPU.
sleep(20);
}
}
// Save more CPU resources when camera is no longer needed.
visionPortal.close();
} // end runOpMode()
/**
* Initialize the TensorFlow Object Detection processor.
*/
private void initTfod() {
// Create the TensorFlow processor by using a builder.
tfod = new TfodProcessor.Builder().build();
webcam1 = hardwareMap.get(WebcamName.class, "Webcam 1");
webcam2 = hardwareMap.get(WebcamName.class, "Webcam 2");
CameraName switchableCamera = ClassFactory.getInstance()
.getCameraManager().nameForSwitchableCamera(webcam1, webcam2);
// Create the vision portal by using a builder.
visionPortal = new VisionPortal.Builder()
.setCamera(switchableCamera)
.addProcessor(tfod)
.build();
} // end method initTfod()
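// The "switchable camera" name created above bundles both webcams into a single
// camera for the portal, which can then swap between them at runtime via
// visionPortal.setActiveCamera() (see doCameraSwitching() below).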
/**
* Function to add telemetry about camera switching.
*/
private void telemetryCameraSwitching() {
if (visionPortal.getActiveCamera().equals(webcam1)) {
telemetry.addData("activeCamera", "Webcam 1");
telemetry.addData("Press RightBumper", "to switch to Webcam 2");
} else {
telemetry.addData("activeCamera", "Webcam 2");
telemetry.addData("Press LeftBumper", "to switch to Webcam 1");
}
} // end method telemetryCameraSwitching()
/**
* Function to add telemetry about TensorFlow Object Detection (TFOD) recognitions.
*/
private void telemetryTfod() {
List<Recognition> currentRecognitions = tfod.getRecognitions();
telemetry.addData("# Objects Detected", currentRecognitions.size());
// Step through the list of recognitions and display info for each one.
for (Recognition recognition : currentRecognitions) {
double x = (recognition.getLeft() + recognition.getRight()) / 2 ;
double y = (recognition.getTop() + recognition.getBottom()) / 2 ;
telemetry.addData(""," ");
telemetry.addData("Image", "%s (%.0f %% Conf.)", recognition.getLabel(), recognition.getConfidence() * 100);
telemetry.addData("- Position", "%.0f / %.0f", x, y);
telemetry.addData("- Size", "%.0f x %.0f", recognition.getWidth(), recognition.getHeight());
} // end for() loop
} // end method telemetryTfod()
/**
* Function to set the active camera according to input from the gamepad.
*/
private void doCameraSwitching() {
if (visionPortal.getCameraState() == CameraState.STREAMING) {
// If the left bumper is pressed, use Webcam 1.
// If the right bumper is pressed, use Webcam 2.
boolean newLeftBumper = gamepad1.left_bumper;
boolean newRightBumper = gamepad1.right_bumper;
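// Compare the current and previous bumper states so a held bumper only switches
// cameras once (rising-edge detection), rather than on every pass through the loop.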
if (newLeftBumper && !oldLeftBumper) {
visionPortal.setActiveCamera(webcam1);
} else if (newRightBumper && !oldRightBumper) {
visionPortal.setActiveCamera(webcam2);
}
oldLeftBumper = newLeftBumper;
oldRightBumper = newRightBumper;
}
} // end method doCameraSwitching()
} // end class

View File

@ -1,190 +0,0 @@
/* Copyright (c) 2019 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.robotcontroller.external.samples;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import java.util.List;
import org.firstinspires.ftc.robotcore.external.ClassFactory;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer;
import org.firstinspires.ftc.robotcore.external.tfod.TFObjectDetector;
import org.firstinspires.ftc.robotcore.external.tfod.Recognition;
/**
* This 2022-2023 OpMode illustrates the basics of using the TensorFlow Object Detection API to
* determine which image is being presented to the robot.
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list.
*
* IMPORTANT: In order to use this OpMode, you need to obtain your own Vuforia license key as
* is explained below.
*/
@TeleOp(name = "Concept: TensorFlow Object Detection Webcam", group = "Concept")
@Disabled
public class ConceptTensorFlowObjectDetectionWebcam extends LinearOpMode {
/*
* Specify the source for the TensorFlow Model.
* If the TensorFlowLite object model is included in the Robot Controller App as an "asset",
* the OpMode must load it using loadModelFromAsset(). However, if a team-generated model
* has been downloaded to the Robot Controller's SD FLASH memory, it must be loaded using loadModelFromFile().
* Here we assume it's an Asset. Also see method initTfod() below.
*/
private static final String TFOD_MODEL_ASSET = "PowerPlay.tflite";
// private static final String TFOD_MODEL_FILE = "/sdcard/FIRST/tflitemodels/CustomTeamModel.tflite";
private static final String[] LABELS = {
"1 Bolt",
"2 Bulb",
"3 Panel"
};
/*
* IMPORTANT: You need to obtain your own license key to use Vuforia. The string below with which
* 'parameters.vuforiaLicenseKey' is initialized is for illustration only, and will not function.
* A Vuforia 'Development' license key can be obtained free of charge from the Vuforia developer
* web site at https://developer.vuforia.com/license-manager.
*
* Vuforia license keys are always 380 characters long, and look as if they contain mostly
* random data. As an example, here is a fragment of a valid key:
* ... yIgIzTqZ4mWjk9wd3cZO9T1axEqzuhxoGlfOOI2dRzKS4T0hQ8kT ...
* Once you've obtained a license key, copy the string from the Vuforia web site
* and paste it into your code on the next line, between the double quotes.
*/
private static final String VUFORIA_KEY =
" -- YOUR NEW VUFORIA KEY GOES HERE --- ";
/**
* {@link #vuforia} is the variable we will use to store our instance of the Vuforia
* localization engine.
*/
private VuforiaLocalizer vuforia;
/**
* {@link #tfod} is the variable we will use to store our instance of the TensorFlow Object
* Detection engine.
*/
private TFObjectDetector tfod;
@Override
public void runOpMode() {
// The TFObjectDetector uses the camera frames from the VuforiaLocalizer, so we create that
// first.
initVuforia();
initTfod();
/**
* Activate TensorFlow Object Detection before we wait for the start command.
* Do it here so that the Camera Stream window will have the TensorFlow annotations visible.
**/
if (tfod != null) {
tfod.activate();
// The TensorFlow software will scale the input images from the camera to a lower resolution.
// This can result in lower detection accuracy at longer distances (> 55cm or 22").
// If your target is at a distance greater than 50 cm (20"), you can increase the magnification value
// to artificially zoom in to the center of image. For best results, the "aspectRatio" argument
// should be set to the value of the images used to create the TensorFlow Object Detection model
// (typically 16/9).
tfod.setZoom(1.0, 16.0/9.0);
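// For example, tfod.setZoom(2.5, 16.0/9.0) would magnify the center of the image
// 2.5x (an illustrative value; tune the magnification for your own targets).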
}
/** Wait for the game to begin */
telemetry.addData(">", "Press Play to start op mode");
telemetry.update();
waitForStart();
if (opModeIsActive()) {
while (opModeIsActive()) {
if (tfod != null) {
// getUpdatedRecognitions() will return null if no new information is available since
// the last time that call was made.
List<Recognition> updatedRecognitions = tfod.getUpdatedRecognitions();
if (updatedRecognitions != null) {
telemetry.addData("# Objects Detected", updatedRecognitions.size());
// step through the list of recognitions and display image position/size information for each one
// Note: "Image number" refers to the randomized image orientation/number
for (Recognition recognition : updatedRecognitions) {
double col = (recognition.getLeft() + recognition.getRight()) / 2 ;
double row = (recognition.getTop() + recognition.getBottom()) / 2 ;
double width = Math.abs(recognition.getRight() - recognition.getLeft()) ;
double height = Math.abs(recognition.getTop() - recognition.getBottom()) ;
telemetry.addData(""," ");
telemetry.addData("Image", "%s (%.0f %% Conf.)", recognition.getLabel(), recognition.getConfidence() * 100 );
telemetry.addData("- Position (Row/Col)","%.0f / %.0f", row, col);
telemetry.addData("- Size (Width/Height)","%.0f / %.0f", width, height);
}
telemetry.update();
}
}
}
}
}
/**
* Initialize the Vuforia localization engine.
*/
private void initVuforia() {
/*
* Configure Vuforia by creating a Parameter object, and passing it to the Vuforia engine.
*/
VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters();
parameters.vuforiaLicenseKey = VUFORIA_KEY;
parameters.cameraName = hardwareMap.get(WebcamName.class, "Webcam 1");
// Instantiate the Vuforia engine
vuforia = ClassFactory.getInstance().createVuforia(parameters);
}
/**
* Initialize the TensorFlow Object Detection engine.
*/
private void initTfod() {
int tfodMonitorViewId = hardwareMap.appContext.getResources().getIdentifier(
"tfodMonitorViewId", "id", hardwareMap.appContext.getPackageName());
TFObjectDetector.Parameters tfodParameters = new TFObjectDetector.Parameters(tfodMonitorViewId);
tfodParameters.minResultConfidence = 0.75f;
tfodParameters.isModelTensorFlow2 = true;
tfodParameters.inputSize = 300;
tfod = ClassFactory.getInstance().createTFObjectDetector(tfodParameters, vuforia);
// Use loadModelFromAsset() if the TF Model is built in as an asset by Android Studio
// Use loadModelFromFile() if you have downloaded a custom team model to the Robot Controller's FLASH.
tfod.loadModelFromAsset(TFOD_MODEL_ASSET, LABELS);
// tfod.loadModelFromFile(TFOD_MODEL_FILE, LABELS);
}
}

View File

@ -1,186 +0,0 @@
/* Copyright (c) 2017 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.robotcontroller.external.samples;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import org.firstinspires.ftc.robotcore.external.ClassFactory;
import org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix;
import org.firstinspires.ftc.robotcore.external.matrices.VectorF;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.robotcore.external.navigation.AxesOrder;
import org.firstinspires.ftc.robotcore.external.navigation.AxesReference;
import org.firstinspires.ftc.robotcore.external.navigation.Orientation;
import org.firstinspires.ftc.robotcore.external.navigation.RelicRecoveryVuMark;
import org.firstinspires.ftc.robotcore.external.navigation.VuMarkInstanceId;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackable;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackableDefaultListener;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackables;
/**
* This OpMode illustrates the basics of using the Vuforia engine to determine
* the identity of Vuforia VuMarks encountered on the field. The code is structured as
* a LinearOpMode. It shares much structure with {@link ConceptVuforiaFieldNavigation}; we do not
* duplicate the core Vuforia documentation found there, but instead focus on the
* differences between the use of Vuforia for navigation vs. VuMark identification.
*
* @see ConceptVuforiaFieldNavigation
* @see VuforiaLocalizer
* @see VuforiaTrackableDefaultListener
* see ftc_app/doc/tutorial/FTC_FieldCoordinateSystemDefinition.pdf
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
*
* IMPORTANT: In order to use this OpMode, you need to obtain your own Vuforia license key as
* is explained below.
*/
@TeleOp(name="Concept: VuMark Id", group ="Concept")
@Disabled
public class ConceptVuMarkIdentification extends LinearOpMode {
public static final String TAG = "Vuforia VuMark Sample";
OpenGLMatrix lastLocation = null;
/**
* {@link #vuforia} is the variable we will use to store our instance of the Vuforia
* localization engine.
*/
VuforiaLocalizer vuforia;
@Override public void runOpMode() {
/*
* To start up Vuforia, tell it the view that we wish to use for the camera monitor (on the RC phone).
*/
int cameraMonitorViewId = hardwareMap.appContext.getResources().getIdentifier("cameraMonitorViewId", "id", hardwareMap.appContext.getPackageName());
VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(cameraMonitorViewId);
// OR... Do Not Activate the Camera Monitor View, to save power
// VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters();
/*
* IMPORTANT: You need to obtain your own license key to use Vuforia. The string below with which
* 'parameters.vuforiaLicenseKey' is initialized is for illustration only, and will not function.
* A Vuforia 'Development' license key can be obtained free of charge from the Vuforia developer
* web site at https://developer.vuforia.com/license-manager.
*
* Vuforia license keys are always 380 characters long, and look as if they contain mostly
* random data. As an example, here is a fragment of a valid key:
* ... yIgIzTqZ4mWjk9wd3cZO9T1axEqzuhxoGlfOOI2dRzKS4T0hQ8kT ...
* Once you've obtained a license key, copy the string from the Vuforia web site
* and paste it into your code on the next line, between the double quotes.
*/
parameters.vuforiaLicenseKey = " -- YOUR NEW VUFORIA KEY GOES HERE --- ";
/*
* We also indicate which camera on the RC that we wish to use.
* Here we choose the back (HiRes) camera (for greater range), but
* for a competition robot, the front camera might be more convenient.
*/
parameters.cameraDirection = VuforiaLocalizer.CameraDirection.BACK;
/**
* Instantiate the Vuforia engine
*/
vuforia = ClassFactory.getInstance().createVuforia(parameters);
/**
* Load the data set containing the VuMarks for Relic Recovery. There's only one trackable
* in this data set: all three of the VuMarks in the game were created from this one template,
* but differ in their instance id information.
* @see VuMarkInstanceId
*/
VuforiaTrackables relicTrackables = this.vuforia.loadTrackablesFromAsset("RelicVuMark");
VuforiaTrackable relicTemplate = relicTrackables.get(0);
relicTemplate.setName("relicVuMarkTemplate"); // can help in debugging; otherwise not necessary
telemetry.addData(">", "Press Play to start");
telemetry.update();
waitForStart();
relicTrackables.activate();
while (opModeIsActive()) {
/**
* See if any of the instances of {@link relicTemplate} are currently visible.
* {@link RelicRecoveryVuMark} is an enum which can have the following values:
* UNKNOWN, LEFT, CENTER, and RIGHT. When a VuMark is visible, something other than
* UNKNOWN will be returned by {@link RelicRecoveryVuMark#from(VuforiaTrackable)}.
*/
RelicRecoveryVuMark vuMark = RelicRecoveryVuMark.from(relicTemplate);
if (vuMark != RelicRecoveryVuMark.UNKNOWN) {
/* Found an instance of the template. In the actual game, you will probably
* loop until this condition occurs, then move on to act accordingly depending
* on which VuMark was visible. */
telemetry.addData("VuMark", "%s visible", vuMark);
/* For fun, we also exhibit the navigational pose. In the Relic Recovery game,
* it is perhaps unlikely that you will actually need to act on this pose information, but
* we illustrate it nevertheless, for completeness. */
OpenGLMatrix pose = ((VuforiaTrackableDefaultListener)relicTemplate.getListener()).getPose();
telemetry.addData("Pose", format(pose));
/* We further illustrate how to decompose the pose into useful rotational and
* translational components */
if (pose != null) {
VectorF trans = pose.getTranslation();
Orientation rot = Orientation.getOrientation(pose, AxesReference.EXTRINSIC, AxesOrder.XYZ, AngleUnit.DEGREES);
// Extract the X, Y, and Z components of the offset of the target relative to the robot
double tX = trans.get(0);
double tY = trans.get(1);
double tZ = trans.get(2);
// Extract the rotational components of the target relative to the robot
double rX = rot.firstAngle;
double rY = rot.secondAngle;
double rZ = rot.thirdAngle;
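// Note: the rotation angles above are in degrees (per the AngleUnit.DEGREES
// argument), and the translation components are in millimeters, the physical
// unit convention Vuforia uses throughout these samples.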
}
}
else {
telemetry.addData("VuMark", "not visible");
}
telemetry.update();
}
}
String format(OpenGLMatrix transformationMatrix) {
return (transformationMatrix != null) ? transformationMatrix.formatAsTransform() : "null";
}
}

View File

@ -1,194 +0,0 @@
/* Copyright (c) 2017 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.robotcontroller.external.samples;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import org.firstinspires.ftc.robotcore.external.ClassFactory;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix;
import org.firstinspires.ftc.robotcore.external.matrices.VectorF;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.robotcore.external.navigation.AxesOrder;
import org.firstinspires.ftc.robotcore.external.navigation.AxesReference;
import org.firstinspires.ftc.robotcore.external.navigation.Orientation;
import org.firstinspires.ftc.robotcore.external.navigation.RelicRecoveryVuMark;
import org.firstinspires.ftc.robotcore.external.navigation.VuMarkInstanceId;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackable;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackableDefaultListener;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackables;
/**
* This OpMode illustrates the basics of using the Vuforia engine to determine
* the identity of Vuforia VuMarks encountered on the field. The code is structured as
* a LinearOpMode. It shares much structure with {@link ConceptVuforiaFieldNavigationWebcam}; we do not
* duplicate the core Vuforia documentation found there, but instead focus on the
* differences between the use of Vuforia for navigation vs. VuMark identification.
*
* @see ConceptVuforiaFieldNavigationWebcam
* @see VuforiaLocalizer
* @see VuforiaTrackableDefaultListener
* see ftc_app/doc/tutorial/FTC_FieldCoordinateSystemDefinition.pdf
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
*
* IMPORTANT: In order to use this OpMode, you need to obtain your own Vuforia license key as
* is explained below.
*/
@TeleOp(name="Concept: VuMark Id Webcam", group ="Concept")
@Disabled
public class ConceptVuMarkIdentificationWebcam extends LinearOpMode {
public static final String TAG = "Vuforia VuMark Sample";
OpenGLMatrix lastLocation = null;
/**
* {@link #vuforia} is the variable we will use to store our instance of the Vuforia
* localization engine.
*/
VuforiaLocalizer vuforia;
/**
* This is the webcam we are to use. As with other hardware devices such as motors and
* servos, this device is identified using the robot configuration tool in the FTC application.
*/
WebcamName webcamName;
@Override public void runOpMode() {
/*
* Retrieve the camera we are to use.
*/
webcamName = hardwareMap.get(WebcamName.class, "Webcam 1");
/*
* To start up Vuforia, tell it the view that we wish to use for the camera monitor (on the RC phone).
* If no camera monitor is desired, use the parameterless constructor instead (commented out below).
*/
int cameraMonitorViewId = hardwareMap.appContext.getResources().getIdentifier("cameraMonitorViewId", "id", hardwareMap.appContext.getPackageName());
VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(cameraMonitorViewId);
// OR... Do Not Activate the Camera Monitor View, to save power
// VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters();
/*
* IMPORTANT: You need to obtain your own license key to use Vuforia. The string below with which
* 'parameters.vuforiaLicenseKey' is initialized is for illustration only, and will not function.
* A Vuforia 'Development' license key can be obtained free of charge from the Vuforia developer
* web site at https://developer.vuforia.com/license-manager.
*
* Vuforia license keys are always 380 characters long, and look as if they contain mostly
* random data. As an example, here is a fragment of a valid key:
* ... yIgIzTqZ4mWjk9wd3cZO9T1axEqzuhxoGlfOOI2dRzKS4T0hQ8kT ...
* Once you've obtained a license key, copy the string from the Vuforia web site
* and paste it into your code on the next line, between the double quotes.
*/
parameters.vuforiaLicenseKey = " -- YOUR NEW VUFORIA KEY GOES HERE --- ";
/**
* We also indicate which camera on the RC we wish to use. For pedagogical purposes,
* we use the same logic as in {@link ConceptVuforiaNavigationWebcam}.
*/
parameters.cameraName = webcamName;
this.vuforia = ClassFactory.getInstance().createVuforia(parameters);
/**
* Load the data set containing the VuMarks for Relic Recovery. There's only one trackable
* in this data set: all three of the VuMarks in the game were created from this one template,
* but differ in their instance id information.
* @see VuMarkInstanceId
*/
VuforiaTrackables relicTrackables = this.vuforia.loadTrackablesFromAsset("RelicVuMark");
VuforiaTrackable relicTemplate = relicTrackables.get(0);
relicTemplate.setName("relicVuMarkTemplate"); // can help in debugging; otherwise not necessary
telemetry.addData(">", "Press Play to start");
telemetry.update();
waitForStart();
relicTrackables.activate();
while (opModeIsActive()) {
/**
* See if any of the instances of {@link relicTemplate} are currently visible.
* {@link RelicRecoveryVuMark} is an enum which can have the following values:
* UNKNOWN, LEFT, CENTER, and RIGHT. When a VuMark is visible, something other than
* UNKNOWN will be returned by {@link RelicRecoveryVuMark#from(VuforiaTrackable)}.
*/
RelicRecoveryVuMark vuMark = RelicRecoveryVuMark.from(relicTemplate);
if (vuMark != RelicRecoveryVuMark.UNKNOWN) {
/* Found an instance of the template. In the actual game, you will probably
* loop until this condition occurs, then move on to act accordingly depending
* on which VuMark was visible. */
telemetry.addData("VuMark", "%s visible", vuMark);
/* For fun, we also exhibit the navigational pose. In the Relic Recovery game,
* it is perhaps unlikely that you will actually need to act on this pose information, but
* we illustrate it nevertheless, for completeness. */
OpenGLMatrix pose = ((VuforiaTrackableDefaultListener)relicTemplate.getListener()).getFtcCameraFromTarget();
telemetry.addData("Pose", format(pose));
/* We further illustrate how to decompose the pose into useful rotational and
* translational components */
if (pose != null) {
VectorF trans = pose.getTranslation();
Orientation rot = Orientation.getOrientation(pose, AxesReference.EXTRINSIC, AxesOrder.XYZ, AngleUnit.DEGREES);
// Extract the X, Y, and Z components of the offset of the target relative to the robot
double tX = trans.get(0);
double tY = trans.get(1);
double tZ = trans.get(2);
// Extract the rotational components of the target relative to the robot
double rX = rot.firstAngle;
double rY = rot.secondAngle;
double rZ = rot.thirdAngle;
}
}
else {
telemetry.addData("VuMark", "not visible");
}
telemetry.update();
}
}
String format(OpenGLMatrix transformationMatrix) {
return (transformationMatrix != null) ? transformationMatrix.formatAsTransform() : "null";
}
}

View File

@ -1,203 +0,0 @@
package org.firstinspires.ftc.robotcontroller.external.samples;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.util.Range;
import org.firstinspires.ftc.robotcore.external.ClassFactory;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix;
import org.firstinspires.ftc.robotcore.external.matrices.VectorF;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackable;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackableDefaultListener;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackables;
/**
* This OpMode illustrates using a webcam to locate and drive towards ANY Vuforia target.
* The code assumes a basic two-wheel Robot Configuration with motors named left_drive and right_drive.
* The motor directions must be set so a positive drive goes forward and a positive turn rotates to the right.
*
* Under manual control, the left stick will move forward/back, and the right stick will turn left/right.
* This is called POV Joystick mode, different from Tank Drive (where each joystick controls a wheel).
* Manually drive the robot until it displays Target data on the Driver Station.
* Press and hold the *Left Bumper* to enable the automatic "Drive to target" mode.
* Release the Left Bumper to return to manual driving mode.
*
* Use DESIRED_DISTANCE to set how close you want the robot to get to the target.
* Speed and Turn sensitivity can be adjusted using the SPEED_GAIN and TURN_GAIN constants.
*
* For more Vuforia details, or to adapt this OpMode for a phone camera, view the
* ConceptVuforiaFieldNavigation and ConceptVuforiaFieldNavigationWebcam samples.
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list.
*
* IMPORTANT: In order to use this OpMode, you need to obtain your own Vuforia license key as
* is explained below.
*/
@TeleOp(name="Drive To Target", group = "Concept")
@Disabled
public class ConceptVuforiaDriveToTargetWebcam extends LinearOpMode
{
// Adjust these numbers to suit your robot.
final double DESIRED_DISTANCE = 8.0; // this is how close the camera should get to the target (inches)
// The GAIN constants set the relationship between the measured position error,
// and how much power is applied to the drive motors. Drive = Error * Gain
// Make these values smaller for smoother control.
final double SPEED_GAIN = 0.02 ; // Speed Control "Gain". e.g. ramp up to 50% power at a 25 inch error. (0.50 / 25.0)
final double TURN_GAIN = 0.01 ; // Turn Control "Gain". e.g. ramp up to 25% power at a 25 degree error. (0.25 / 25.0)
final double MM_PER_INCH = 25.40 ; // Metric conversion
/*
* IMPORTANT: You need to obtain your own license key to use Vuforia. The string below with which
* 'parameters.vuforiaLicenseKey' is initialized is for illustration only, and will not function.
* A Vuforia 'Development' license key can be obtained free of charge from the Vuforia developer
* web site at https://developer.vuforia.com/license-manager.
*
* Vuforia license keys are always 380 characters long, and look as if they contain mostly
* random data. As an example, here is a fragment of a valid key:
* ... yIgIzTqZ4mWjk9wd3cZO9T1axEqzuhxoGlfOOI2dRzKS4T0hQ8kT ...
* Once you've obtained a license key, copy the string from the Vuforia web site
* and paste it into your code on the next line, between the double quotes.
*/
private static final String VUFORIA_KEY =
" --- YOUR NEW VUFORIA KEY GOES HERE --- ";
VuforiaLocalizer vuforia = null;
OpenGLMatrix targetPose = null;
String targetName = "";
private DcMotor leftDrive = null;
private DcMotor rightDrive = null;
@Override public void runOpMode()
{
/*
* Configure Vuforia by creating a Parameter object, and passing it to the Vuforia engine.
* To get an on-phone camera preview, use the code below.
* If no camera preview is desired, use the parameter-less constructor instead (commented out below).
*/
int cameraMonitorViewId = hardwareMap.appContext.getResources().getIdentifier("cameraMonitorViewId", "id", hardwareMap.appContext.getPackageName());
VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(cameraMonitorViewId);
// VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters();
parameters.vuforiaLicenseKey = VUFORIA_KEY;
// Turn off Extended tracking. Set this true if you want Vuforia to track beyond the target.
parameters.useExtendedTracking = false;
// Connect to the camera we are to use. This name must match what is set up in Robot Configuration
parameters.cameraName = hardwareMap.get(WebcamName.class, "Webcam 1");
this.vuforia = ClassFactory.getInstance().createVuforia(parameters);
// Load the trackable objects from the Assets file, and give them meaningful names
VuforiaTrackables targetsPowerPlay = this.vuforia.loadTrackablesFromAsset("PowerPlay");
targetsPowerPlay.get(0).setName("Red Audience Wall");
targetsPowerPlay.get(1).setName("Red Rear Wall");
targetsPowerPlay.get(2).setName("Blue Audience Wall");
targetsPowerPlay.get(3).setName("Blue Rear Wall");
// Start tracking targets in the background
targetsPowerPlay.activate();
// Initialize the hardware variables. Note that the strings used here as parameters
// to 'get' must correspond to the names assigned during the robot configuration
// step (using the FTC Robot Controller app on the phone).
leftDrive = hardwareMap.get(DcMotor.class, "left_drive");
rightDrive = hardwareMap.get(DcMotor.class, "right_drive");
// To drive forward, most robots need the motor on one side to be reversed, because the axles point in opposite directions.
// When run, this OpMode should start both motors driving forward. So adjust these two lines based on your first test drive.
// Note: The settings here assume direct drive on left and right wheels. Gear Reduction or 90 Deg drives may require direction flips
leftDrive.setDirection(DcMotor.Direction.REVERSE);
rightDrive.setDirection(DcMotor.Direction.FORWARD);
telemetry.addData(">", "Press Play to start");
telemetry.update();
waitForStart();
boolean targetFound = false; // Set to true when a target is detected by Vuforia
double targetRange = 0; // Distance from camera to target in Inches
double targetBearing = 0; // Robot Heading, relative to target. Positive degrees means target is to the right.
double drive = 0; // Desired forward power (-1 to +1)
double turn = 0; // Desired turning power (-1 to +1)
while (opModeIsActive())
{
// Look for first visible target, and save its pose.
targetFound = false;
for (VuforiaTrackable trackable : targetsPowerPlay)
{
if (((VuforiaTrackableDefaultListener) trackable.getListener()).isVisible())
{
targetPose = ((VuforiaTrackableDefaultListener)trackable.getListener()).getVuforiaCameraFromTarget();
// if we have a target, process the "pose" to determine the position of the target relative to the robot.
if (targetPose != null)
{
targetFound = true;
targetName = trackable.getName();
VectorF trans = targetPose.getTranslation();
// Extract the X & Y components of the offset of the target relative to the robot
double targetX = trans.get(0) / MM_PER_INCH; // Image X axis
double targetY = trans.get(2) / MM_PER_INCH; // Image Z axis
// Target range is the straight-line distance from the camera to the target (hypotenuse of a right triangle).
targetRange = Math.hypot(targetX, targetY);
// Target bearing is the angle between the robot's forward axis and the line to the target.
targetBearing = Math.toDegrees(Math.asin(targetX / targetRange));
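// e.g. a target 12 inches to the right (targetX) and 16 inches ahead (targetY) gives
// targetRange = hypot(12, 16) = 20 inches and targetBearing = asin(12/20) ≈ 37 degrees.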
break; // jump out of target tracking loop if we find a target.
}
}
}
// Tell the driver what we see, and what to do.
if (targetFound) {
telemetry.addData(">","HOLD Left-Bumper to Drive to Target\n");
telemetry.addData("Target", " %s", targetName);
telemetry.addData("Range", "%5.1f inches", targetRange);
telemetry.addData("Bearing","%3.0f degrees", targetBearing);
} else {
telemetry.addData(">","Drive using joystick to find target\n");
}
// Drive to target Automatically if Left Bumper is being pressed, AND we have found a target.
if (gamepad1.left_bumper && targetFound) {
// Determine heading and range error so we can use them to control the robot automatically.
double rangeError = (targetRange - DESIRED_DISTANCE);
double headingError = targetBearing;
// Use the speed and turn "gains" to calculate how we want the robot to move.
drive = rangeError * SPEED_GAIN;
turn = headingError * TURN_GAIN ;
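// e.g. with SPEED_GAIN = 0.02, a robot 18 inches from the 8-inch DESIRED_DISTANCE has a
// rangeError of 10 inches, giving drive = 0.20 (20% power); the power tapers to zero
// as the robot closes on the target.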
telemetry.addData("Auto","Drive %5.2f, Turn %5.2f", drive, turn);
} else {
// drive using manual POV Joystick mode.
drive = -gamepad1.left_stick_y / 2.0; // Reduce drive rate to 50%.
turn = gamepad1.right_stick_x / 4.0; // Reduce turn rate to 25%.
telemetry.addData("Manual","Drive %5.2f, Turn %5.2f", drive, turn);
}
telemetry.update();
// Calculate left and right wheel powers and send them to the motors.
double leftPower = Range.clip(drive + turn, -1.0, 1.0) ;
double rightPower = Range.clip(drive - turn, -1.0, 1.0) ;
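// e.g. drive = 0.3 and turn = 0.1 gives leftPower = 0.4 and rightPower = 0.2,
// so the robot moves forward while arcing to the right.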
leftDrive.setPower(leftPower);
rightDrive.setPower(rightPower);
sleep(10);
}
}
}

View File

@ -1,283 +0,0 @@
/* Copyright (c) 2019 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.robotcontroller.external.samples;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import org.firstinspires.ftc.robotcore.external.ClassFactory;
import org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix;
import org.firstinspires.ftc.robotcore.external.matrices.VectorF;
import org.firstinspires.ftc.robotcore.external.navigation.Orientation;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackable;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackableDefaultListener;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackables;
import java.util.ArrayList;
import java.util.List;
import static org.firstinspires.ftc.robotcore.external.navigation.AngleUnit.DEGREES;
import static org.firstinspires.ftc.robotcore.external.navigation.AxesOrder.XYZ;
import static org.firstinspires.ftc.robotcore.external.navigation.AxesOrder.YZX;
import static org.firstinspires.ftc.robotcore.external.navigation.AxesReference.EXTRINSIC;
import static org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer.CameraDirection.BACK;
/**
* This OpMode illustrates using the Vuforia localizer to determine positioning and orientation of
* robot on the FTC field using the RC phone's camera. The code is structured as a LinearOpMode.
*
* Note: If you are using a WEBCAM see ConceptVuforiaFieldNavigationWebcam.java
*
* When images are located, Vuforia is able to determine the position and orientation of the
* image relative to the camera. This sample code then combines that information with a
* knowledge of where the target images are on the field, to determine the location of the camera.
*
* Finally, the location of the camera on the robot is used to determine the
* robot's location and orientation on the field.
*
* To learn more about the FTC field coordinate model, see FTC_FieldCoordinateSystemDefinition.pdf in this folder
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
*
* IMPORTANT: In order to use this OpMode, you need to obtain your own Vuforia license key as
* is explained below.
*/
@TeleOp(name="Vuforia Field Nav", group ="Concept")
@Disabled
public class ConceptVuforiaFieldNavigation extends LinearOpMode {
// IMPORTANT: For Phone Camera, set 1) the camera source and 2) the orientation, based on how your phone is mounted:
// 1) Camera Source. Valid choices are: BACK (behind screen) or FRONT (selfie side)
// 2) Phone Orientation. Choices are: PHONE_IS_PORTRAIT = true (portrait) or PHONE_IS_PORTRAIT = false (landscape)
private static final VuforiaLocalizer.CameraDirection CAMERA_CHOICE = BACK;
private static final boolean PHONE_IS_PORTRAIT = false ;
/*
* IMPORTANT: You need to obtain your own license key to use Vuforia. The string below with which
* 'parameters.vuforiaLicenseKey' is initialized is for illustration only, and will not function.
* A Vuforia 'Development' license key can be obtained free of charge from the Vuforia developer
* web site at https://developer.vuforia.com/license-manager.
*
* Vuforia license keys are always 380 characters long, and look as if they contain mostly
* random data. As an example, here is a fragment of a valid key:
* ... yIgIzTqZ4mWjk9wd3cZO9T1axEqzuhxoGlfOOI2dRzKS4T0hQ8kT ...
* Once you've obtained a license key, copy the string from the Vuforia web site
* and paste it in to your code on the next line, between the double quotes.
*/
private static final String VUFORIA_KEY =
" -- YOUR NEW VUFORIA KEY GOES HERE --- ";
// Since ImageTarget trackables use mm to specify their dimensions, we must use mm for all the physical dimensions.
// We will define some constants and conversions here. These are useful for the FTC competition field.
private static final float mmPerInch = 25.4f;
private static final float mmTargetHeight = 6 * mmPerInch; // the height of the center of the target image above the floor
private static final float halfField = 72 * mmPerInch;
private static final float halfTile = 12 * mmPerInch;
private static final float oneAndHalfTile = 36 * mmPerInch;
// Class Members
private OpenGLMatrix lastLocation = null;
private VuforiaLocalizer vuforia = null;
private VuforiaTrackables targets = null ;
private boolean targetVisible = false;
private float phoneXRotate = 0;
private float phoneYRotate = 0;
private float phoneZRotate = 0;
@Override public void runOpMode() {
/*
* Configure Vuforia by creating a Parameter object, and passing it to the Vuforia engine.
* To get an on-phone camera preview, use the code below.
* If no camera preview is desired, use the parameter-less constructor instead (commented out below).
*/
int cameraMonitorViewId = hardwareMap.appContext.getResources().getIdentifier("cameraMonitorViewId", "id", hardwareMap.appContext.getPackageName());
VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(cameraMonitorViewId);
// VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters();
parameters.vuforiaLicenseKey = VUFORIA_KEY;
parameters.cameraDirection = CAMERA_CHOICE;
// Turn off Extended tracking. Set this true if you want Vuforia to track beyond the target.
parameters.useExtendedTracking = false;
// Instantiate the Vuforia engine
vuforia = ClassFactory.getInstance().createVuforia(parameters);
// Load the trackable assets.
targets = this.vuforia.loadTrackablesFromAsset("PowerPlay");
// For convenience, gather together all the trackable objects in one easily-iterable collection
List<VuforiaTrackable> allTrackables = new ArrayList<VuforiaTrackable>();
allTrackables.addAll(targets);
/**
* In order for localization to work, we need to tell the system where each target is on the field, and
* where the phone resides on the robot. These specifications are in the form of <em>transformation matrices.</em>
* Transformation matrices are a central, important concept in the math involved in localization.
* See <a href="https://en.wikipedia.org/wiki/Transformation_matrix">Transformation Matrix</a>
* for detailed information. Commonly, you'll encounter transformation matrices as instances
* of the {@link OpenGLMatrix} class.
*
* If you are standing in the Red Alliance Station looking towards the center of the field,
* - The X axis runs from your left to the right. (positive from the center to the right)
* - The Y axis runs from the Red Alliance Station towards the other side of the field
* where the Blue Alliance Station is. (Positive is from the center, towards the BlueAlliance station)
* - The Z axis runs from the floor, upwards towards the ceiling. (Positive is above the floor)
*
* Before being transformed, each target image is conceptually located at the origin of the field's
* coordinate system (the center of the field), facing up.
*/
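// For instance, the "Red Audience Wall" call below is shorthand for building the matrix directly:
//   OpenGLMatrix.translation(-halfField, -oneAndHalfTile, mmTargetHeight)
//       .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, 90));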
// Name and locate each trackable object
identifyTarget(0, "Red Audience Wall", -halfField, -oneAndHalfTile, mmTargetHeight, 90, 0, 90);
identifyTarget(1, "Red Rear Wall", halfField, -oneAndHalfTile, mmTargetHeight, 90, 0, -90);
identifyTarget(2, "Blue Audience Wall", -halfField, oneAndHalfTile, mmTargetHeight, 90, 0, 90);
identifyTarget(3, "Blue Rear Wall", halfField, oneAndHalfTile, mmTargetHeight, 90, 0, -90);
/*
* Create a transformation matrix describing where the phone is on the robot.
*
* NOTE !!!! It's very important that you turn OFF your phone's Auto-Screen-Rotation option.
* Lock it into Portrait for these numbers to work.
*
* Info: The coordinate frame for the robot looks the same as the field.
* The robot's "forward" direction is facing out along X axis, with the LEFT side facing out along the Y axis.
* Z is UP on the robot. This equates to a heading angle of Zero degrees.
*
* The phone starts out lying flat, with the screen facing Up and with the physical top of the phone
* pointing to the LEFT side of the Robot.
* The two examples below assume that the camera is facing forward out the front of the robot.
*/
// We need to rotate the camera around its long axis to bring the correct camera forward.
if (CAMERA_CHOICE == BACK) {
phoneYRotate = -90;
} else {
phoneYRotate = 90;
}
// Rotate the phone vertical about the X axis if it's in portrait mode
if (PHONE_IS_PORTRAIT) {
phoneXRotate = 90 ;
}
// Next, translate the camera lens to where it is on the robot.
// In this example, it is centered on the robot (left-to-right and front-to-back), and 6 inches above ground level.
final float CAMERA_FORWARD_DISPLACEMENT = 0.0f * mmPerInch; // eg: Enter the forward distance from the center of the robot to the camera lens
final float CAMERA_VERTICAL_DISPLACEMENT = 6.0f * mmPerInch; // eg: Camera is 6 Inches above ground
final float CAMERA_LEFT_DISPLACEMENT = 0.0f * mmPerInch; // eg: Enter the left distance from the center of the robot to the camera lens
OpenGLMatrix robotFromCamera = OpenGLMatrix
.translation(CAMERA_FORWARD_DISPLACEMENT, CAMERA_LEFT_DISPLACEMENT, CAMERA_VERTICAL_DISPLACEMENT)
.multiplied(Orientation.getRotationMatrix(EXTRINSIC, YZX, DEGREES, phoneYRotate, phoneZRotate, phoneXRotate));
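// Note: with the YZX axes order, the three angles are applied as Y first (the camera-select
// rotation), then Z, then X (the portrait correction), matching the rotations described above.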
/** Let all the trackable listeners know where the phone is. */
for (VuforiaTrackable trackable : allTrackables) {
((VuforiaTrackableDefaultListener) trackable.getListener()).setPhoneInformation(robotFromCamera, parameters.cameraDirection);
}
/*
* WARNING:
* In this sample, we do not wait for PLAY to be pressed. Target Tracking is started immediately when INIT is pressed.
* This sequence is used to enable the new remote DS Camera Stream feature to be used with this sample.
* CONSEQUENTLY do not put any driving commands in this loop.
* To restore the normal opmode structure, just un-comment the following line:
*/
// waitForStart();
/* Note: To use the remote camera preview:
* AFTER you hit Init on the Driver Station, use the "options menu" to select "Camera Stream"
* Tap the preview window to receive a fresh image.
* It is not permitted to transition to RUN while the camera preview window is active.
* Either press STOP to exit the OpMode, or use the "options menu" again, and select "Camera Stream" to close the preview window.
*/
targets.activate();
while (!isStopRequested()) {
// check all the trackable targets to see which one (if any) is visible.
targetVisible = false;
for (VuforiaTrackable trackable : allTrackables) {
if (((VuforiaTrackableDefaultListener)trackable.getListener()).isVisible()) {
telemetry.addData("Visible Target", trackable.getName());
targetVisible = true;
// getUpdatedRobotLocation() will return null if no new information is available since
// the last time that call was made, or if the trackable is not currently visible.
OpenGLMatrix robotLocationTransform = ((VuforiaTrackableDefaultListener)trackable.getListener()).getUpdatedRobotLocation();
if (robotLocationTransform != null) {
lastLocation = robotLocationTransform;
}
break;
}
}
// Provide feedback as to where the robot is located (if we know).
if (targetVisible) {
// express position (translation) of robot in inches.
VectorF translation = lastLocation.getTranslation();
telemetry.addData("Pos (inches)", "{X, Y, Z} = %.1f, %.1f, %.1f",
translation.get(0) / mmPerInch, translation.get(1) / mmPerInch, translation.get(2) / mmPerInch);
// express the rotation of the robot in degrees.
Orientation rotation = Orientation.getOrientation(lastLocation, EXTRINSIC, XYZ, DEGREES);
telemetry.addData("Rot (deg)", "{Roll, Pitch, Heading} = %.0f, %.0f, %.0f", rotation.firstAngle, rotation.secondAngle, rotation.thirdAngle);
}
else {
telemetry.addData("Visible Target", "none");
}
telemetry.update();
}
// Disable Tracking when we are done;
targets.deactivate();
}
/**
* Identify a target by naming it, and setting its position and orientation on the field.
* @param targetIndex index of the target within the trackables collection
* @param targetName  name to assign to the target
* @param dx, dy, dz  Target offsets in x,y,z axes
* @param rx, ry, rz  Target rotations in x,y,z axes
*/
void identifyTarget(int targetIndex, String targetName, float dx, float dy, float dz, float rx, float ry, float rz) {
VuforiaTrackable aTarget = targets.get(targetIndex);
aTarget.setName(targetName);
aTarget.setLocation(OpenGLMatrix.translation(dx, dy, dz)
.multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, rx, ry, rz)));
}
}

View File

@ -1,274 +0,0 @@
/* Copyright (c) 2019 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.robotcontroller.external.samples;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import org.firstinspires.ftc.robotcore.external.ClassFactory;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix;
import org.firstinspires.ftc.robotcore.external.matrices.VectorF;
import org.firstinspires.ftc.robotcore.external.navigation.Orientation;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackable;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackableDefaultListener;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackables;
import java.util.ArrayList;
import java.util.List;
import static org.firstinspires.ftc.robotcore.external.navigation.AngleUnit.DEGREES;
import static org.firstinspires.ftc.robotcore.external.navigation.AxesOrder.XYZ;
import static org.firstinspires.ftc.robotcore.external.navigation.AxesOrder.XZY;
import static org.firstinspires.ftc.robotcore.external.navigation.AxesReference.EXTRINSIC;
/**
* This OpMode illustrates using the Vuforia localizer to determine positioning and orientation of
* robot on the FTC field using a WEBCAM. The code is structured as a LinearOpMode
*
* NOTE: If you are running on a Phone with a built-in camera, use the ConceptVuforiaFieldNavigation example instead of this one.
* NOTE: It is possible to switch between multiple WebCams (eg: one for the left side and one for the right).
* For a related example of how to do this, see ConceptTensorFlowObjectDetectionSwitchableCameras
*
* When images are located, Vuforia is able to determine the position and orientation of the
* image relative to the camera. This sample code then combines that information with a
* knowledge of where the target images are on the field, to determine the location of the camera.
*
* Finally, the location of the camera on the robot is used to determine the
* robot's location and orientation on the field.
*
* To learn more about the FTC field coordinate model, see FTC_FieldCoordinateSystemDefinition.pdf in this folder
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
*
* IMPORTANT: In order to use this OpMode, you need to obtain your own Vuforia license key as
* is explained below.
*/
@TeleOp(name="Vuforia Field Nav Webcam", group ="Concept")
@Disabled
public class ConceptVuforiaFieldNavigationWebcam extends LinearOpMode {
/*
* IMPORTANT: You need to obtain your own license key to use Vuforia. The string below with which
* 'parameters.vuforiaLicenseKey' is initialized is for illustration only, and will not function.
* A Vuforia 'Development' license key can be obtained free of charge from the Vuforia developer
* web site at https://developer.vuforia.com/license-manager.
*
* Vuforia license keys are always 380 characters long, and look as if they contain mostly
* random data. As an example, here is a fragment of a valid key:
* ... yIgIzTqZ4mWjk9wd3cZO9T1axEqzuhxoGlfOOI2dRzKS4T0hQ8kT ...
* Once you've obtained a license key, copy the string from the Vuforia web site
* and paste it in to your code on the next line, between the double quotes.
*/
private static final String VUFORIA_KEY =
" --- YOUR NEW VUFORIA KEY GOES HERE --- ";
// Since ImageTarget trackables use mm to specify their dimensions, we must use mm for all the physical dimensions.
// We will define some constants and conversions here
private static final float mmPerInch = 25.4f;
private static final float mmTargetHeight = 6 * mmPerInch; // the height of the center of the target image above the floor
private static final float halfField = 72 * mmPerInch;
private static final float halfTile = 12 * mmPerInch;
private static final float oneAndHalfTile = 36 * mmPerInch;
// Class Members
private OpenGLMatrix lastLocation = null;
private VuforiaLocalizer vuforia = null;
private VuforiaTrackables targets = null ;
private WebcamName webcamName = null;
private boolean targetVisible = false;
@Override public void runOpMode() {
// Connect to the camera we are to use. This name must match what is set up in Robot Configuration
webcamName = hardwareMap.get(WebcamName.class, "Webcam 1");
/*
* Configure Vuforia by creating a Parameter object, and passing it to the Vuforia engine.
* We can pass Vuforia the handle to a camera preview resource (on the RC screen);
* If no camera-preview is desired, use the parameter-less constructor instead (commented out below).
* Note: A preview window is required if you want to view the camera stream on the Driver Station Phone.
*/
int cameraMonitorViewId = hardwareMap.appContext.getResources().getIdentifier("cameraMonitorViewId", "id", hardwareMap.appContext.getPackageName());
VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(cameraMonitorViewId);
// VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters();
parameters.vuforiaLicenseKey = VUFORIA_KEY;
// We also indicate which camera we wish to use.
parameters.cameraName = webcamName;
// Turn off Extended tracking. Set this true if you want Vuforia to track beyond the target.
parameters.useExtendedTracking = false;
// Instantiate the Vuforia engine
vuforia = ClassFactory.getInstance().createVuforia(parameters);
// Load the data sets for the trackable objects. These particular data
// sets are stored in the 'assets' part of our application.
targets = this.vuforia.loadTrackablesFromAsset("PowerPlay");
// For convenience, gather together all the trackable objects in one easily-iterable collection
List<VuforiaTrackable> allTrackables = new ArrayList<VuforiaTrackable>();
allTrackables.addAll(targets);
/**
* In order for localization to work, we need to tell the system where each target is on the field, and
* where the phone resides on the robot. These specifications are in the form of <em>transformation matrices.</em>
* Transformation matrices are a central, important concept in the math involved in localization.
* See <a href="https://en.wikipedia.org/wiki/Transformation_matrix">Transformation Matrix</a>
* for detailed information. Commonly, you'll encounter transformation matrices as instances
* of the {@link OpenGLMatrix} class.
*
* If you are standing in the Red Alliance Station looking towards the center of the field,
* - The X axis runs from your left to the right. (positive from the center to the right)
* - The Y axis runs from the Red Alliance Station towards the other side of the field
* where the Blue Alliance Station is. (Positive is from the center, towards the BlueAlliance station)
* - The Z axis runs from the floor, upwards towards the ceiling. (Positive is above the floor)
*
* Before being transformed, each target image is conceptually located at the origin of the field's
* coordinate system (the center of the field), facing up.
*/
// Name and locate each trackable object
identifyTarget(0, "Red Audience Wall", -halfField, -oneAndHalfTile, mmTargetHeight, 90, 0, 90);
identifyTarget(1, "Red Rear Wall", halfField, -oneAndHalfTile, mmTargetHeight, 90, 0, -90);
identifyTarget(2, "Blue Audience Wall", -halfField, oneAndHalfTile, mmTargetHeight, 90, 0, 90);
identifyTarget(3, "Blue Rear Wall", halfField, oneAndHalfTile, mmTargetHeight, 90, 0, -90);
/*
* Create a transformation matrix describing where the camera is on the robot.
*
* Info: The coordinate frame for the robot looks the same as the field.
* The robot's "forward" direction is facing out along X axis, with the LEFT side facing out along the Y axis.
* Z is UP on the robot. This equates to a bearing angle of Zero degrees.
*
* For a WebCam, the default starting orientation of the camera is looking UP (pointing in the Z direction),
* with the wide (horizontal) axis of the camera aligned with the X axis, and
* the Narrow (vertical) axis of the camera aligned with the Y axis
*
* But, this example assumes that the camera is actually facing forward out the front of the robot.
* So, the "default" camera position requires two rotations to get it oriented correctly.
* 1) First it must be rotated +90 degrees around the X axis to get it horizontal (it's now facing out the right side of the robot)
* 2) Next it must be rotated +90 degrees (counter-clockwise) around the Z axis to face forward.
*
* Finally the camera can be translated to its actual mounting position on the robot.
* In this example, it is centered on the robot (left-to-right and front-to-back), and 6 inches above ground level.
*/
final float CAMERA_FORWARD_DISPLACEMENT = 0.0f * mmPerInch; // eg: Enter the forward distance from the center of the robot to the camera lens
final float CAMERA_VERTICAL_DISPLACEMENT = 6.0f * mmPerInch; // eg: Camera is 6 Inches above ground
final float CAMERA_LEFT_DISPLACEMENT = 0.0f * mmPerInch; // eg: Enter the left distance from the center of the robot to the camera lens
OpenGLMatrix cameraLocationOnRobot = OpenGLMatrix
.translation(CAMERA_FORWARD_DISPLACEMENT, CAMERA_LEFT_DISPLACEMENT, CAMERA_VERTICAL_DISPLACEMENT)
.multiplied(Orientation.getRotationMatrix(EXTRINSIC, XZY, DEGREES, 90, 90, 0));
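// Note: the XZY order applies the angles as X first (+90 to bring the camera horizontal),
// then Z (+90 to face forward), then Y (0), matching the two rotations described above.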
/** Let all the trackable listeners know where the camera is. */
for (VuforiaTrackable trackable : allTrackables) {
((VuforiaTrackableDefaultListener) trackable.getListener()).setCameraLocationOnRobot(parameters.cameraName, cameraLocationOnRobot);
}
/*
* WARNING:
* In this sample, we do not wait for PLAY to be pressed. Target Tracking is started immediately when INIT is pressed.
* This sequence is used to enable the new remote DS Camera Preview feature to be used with this sample.
* CONSEQUENTLY do not put any driving commands in this loop.
* To restore the normal opmode structure, just un-comment the following line:
*/
// waitForStart();
/* Note: To use the remote camera preview:
* AFTER you hit Init on the Driver Station, use the "options menu" to select "Camera Stream"
* Tap the preview window to receive a fresh image.
* It is not permitted to transition to RUN while the camera preview window is active.
* Either press STOP to exit the OpMode, or use the "options menu" again, and select "Camera Stream" to close the preview window.
*/
targets.activate();
while (!isStopRequested()) {
// check all the trackable targets to see which one (if any) is visible.
targetVisible = false;
for (VuforiaTrackable trackable : allTrackables) {
if (((VuforiaTrackableDefaultListener)trackable.getListener()).isVisible()) {
telemetry.addData("Visible Target", trackable.getName());
targetVisible = true;
// getUpdatedRobotLocation() will return null if no new information is available since
// the last time that call was made, or if the trackable is not currently visible.
OpenGLMatrix robotLocationTransform = ((VuforiaTrackableDefaultListener)trackable.getListener()).getUpdatedRobotLocation();
if (robotLocationTransform != null) {
lastLocation = robotLocationTransform;
}
break;
}
}
// Provide feedback as to where the robot is located (if we know).
if (targetVisible) {
// express position (translation) of robot in inches.
VectorF translation = lastLocation.getTranslation();
telemetry.addData("Pos (inches)", "{X, Y, Z} = %.1f, %.1f, %.1f",
translation.get(0) / mmPerInch, translation.get(1) / mmPerInch, translation.get(2) / mmPerInch);
// express the rotation of the robot in degrees.
Orientation rotation = Orientation.getOrientation(lastLocation, EXTRINSIC, XYZ, DEGREES);
telemetry.addData("Rot (deg)", "{Roll, Pitch, Heading} = %.0f, %.0f, %.0f", rotation.firstAngle, rotation.secondAngle, rotation.thirdAngle);
}
else {
telemetry.addData("Visible Target", "none");
}
telemetry.update();
}
// Disable Tracking when we are done;
targets.deactivate();
}
/**
* Identify a target by naming it, and setting its position and orientation on the field.
* @param targetIndex index of the target within the trackables collection
* @param targetName  name to assign to the target
* @param dx, dy, dz  Target offsets in x,y,z axes
* @param rx, ry, rz  Target rotations in x,y,z axes
*/
void identifyTarget(int targetIndex, String targetName, float dx, float dy, float dz, float rx, float ry, float rz) {
VuforiaTrackable aTarget = targets.get(targetIndex);
aTarget.setName(targetName);
aTarget.setLocation(OpenGLMatrix.translation(dx, dy, dz)
.multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, rx, ry, rz)));
}
}

View File

@ -72,7 +72,7 @@ import org.firstinspires.ftc.robotcore.external.navigation.Orientation;
* The angle of movement/rotation is assumed to be a standardized rotation around the robot Z axis,
* which means that a Positive rotation is Counter Clockwise, looking down on the field.
* This is consistent with the FTC field coordinate conventions set out in the document:
* ftc_app\doc\tutorial\FTC_FieldCoordinateSystemDefinition.pdf
* https://ftc-docs.firstinspires.org/field-coordinate-system
*
* Control Approach.
*

View File

@ -0,0 +1,293 @@
/* Copyright (c) 2023 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.robotcontroller.external.samples;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.util.Range;
import org.firstinspires.ftc.robotcore.external.hardware.camera.BuiltinCameraDirection;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.robotcore.external.hardware.camera.controls.ExposureControl;
import org.firstinspires.ftc.robotcore.external.hardware.camera.controls.GainControl;
import org.firstinspires.ftc.vision.VisionPortal;
import org.firstinspires.ftc.vision.apriltag.AprilTagDetection;
import org.firstinspires.ftc.vision.apriltag.AprilTagProcessor;
import java.util.List;
import java.util.concurrent.TimeUnit;
/**
* This OpMode illustrates using a camera to locate and drive towards a specific AprilTag.
* The code assumes a Holonomic (Mecanum or X Drive) Robot.
*
* The drive goal is to rotate to keep the Tag centered in the camera, while strafing to be directly in front of the tag, and
* driving towards the tag to achieve the desired distance.
* To reduce any motion blur (which will interrupt the detection process) the Camera exposure is reduced to a very low value (6 ms).
* You can determine the best Exposure and Gain values by using the ConceptAprilTagOptimizeExposure OpMode in this Samples folder.
*
* The code assumes a Robot Configuration with motors named: leftfront_drive and rightfront_drive, leftback_drive and rightback_drive.
* The motor directions must be set so a positive power goes forward on all wheels.
* This sample assumes that the current game AprilTag Library (usually for the current season) is being loaded by default,
* so you should choose to approach a valid tag ID (usually starting at 0)
*
* Under manual control, the left stick will move forward/back & left/right. The right stick will rotate the robot.
* Manually drive the robot until it displays Target data on the Driver Station.
*
* Press and hold the *Left Bumper* to enable the automatic "Drive to target" mode.
* Release the Left Bumper to return to manual driving mode.
*
* Under "Drive To Target" mode, the robot has three goals:
* 1) Turn the robot to always keep the Tag centered on the camera frame. (Use the Target Bearing to turn the robot.)
* 2) Strafe the robot towards the centerline of the Tag, so it approaches directly in front of the tag. (Use the Target Yaw to strafe the robot)
* 3) Drive towards the Tag to get to the desired distance. (Use Tag Range to drive the robot forward/backward)
*
* Use DESIRED_DISTANCE to set how close you want the robot to get to the target.
* Speed and Turn sensitivity can be adjusted using the SPEED_GAIN, STRAFE_GAIN and TURN_GAIN constants.
*
* Use Android Studio to Copy this Class, and Paste it into the TeamCode/src/main/java/org/firstinspires/ftc/teamcode folder.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
*
*/
@TeleOp(name="Omni Drive To AprilTag", group = "Concept")
@Disabled
public class RobotAutoDriveToAprilTagOmni extends LinearOpMode
{
// Adjust these numbers to suit your robot.
final double DESIRED_DISTANCE = 12.0; // this is how close the camera should get to the target (inches)
// Set the GAIN constants to control the relationship between the measured position error, and how much power is
// applied to the drive motors to correct the error.
// Drive = Error * Gain.  Make these values smaller for smoother control, or larger for a more aggressive response.
final double SPEED_GAIN = 0.02 ; // Forward Speed Control "Gain". eg: Ramp up to 50% power at a 25 inch error. (0.50 / 25.0)
final double STRAFE_GAIN = 0.015 ; // Strafe Speed Control "Gain". eg: Ramp up to 25% power at a 25 degree Yaw error. (0.25 / 25.0)
final double TURN_GAIN = 0.01 ; // Turn Control "Gain". eg: Ramp up to 25% power at a 25 degree error. (0.25 / 25.0)
final double MAX_AUTO_SPEED = 0.5; // Clip the approach speed to this max value (adjust for your robot)
final double MAX_AUTO_STRAFE= 0.5; // Clip the approach speed to this max value (adjust for your robot)
final double MAX_AUTO_TURN = 0.3; // Clip the turn speed to this max value (adjust for your robot)
private DcMotor leftFrontDrive = null; // Used to control the left front drive wheel
private DcMotor rightFrontDrive = null; // Used to control the right front drive wheel
private DcMotor leftBackDrive = null; // Used to control the left back drive wheel
private DcMotor rightBackDrive = null; // Used to control the right back drive wheel
private static final boolean USE_WEBCAM = true; // Set true to use a webcam, or false for a phone camera
private static final int DESIRED_TAG_ID = 0; // Choose the tag you want to approach or set to -1 for ANY tag.
private VisionPortal visionPortal; // Used to manage the video source.
private AprilTagProcessor aprilTag; // Used for managing the AprilTag detection process.
private AprilTagDetection desiredTag = null; // Used to hold the data for a detected AprilTag
@Override public void runOpMode()
{
boolean targetFound = false; // Set to true when an AprilTag target is detected
double drive = 0; // Desired forward power/speed (-1 to +1)
double strafe = 0; // Desired strafe power/speed (-1 to +1)
double turn = 0; // Desired turning power/speed (-1 to +1)
// Initialize the Apriltag Detection process
initAprilTag();
// Initialize the hardware variables. Note that the strings used here as parameters
// to 'get' must match the names assigned during the robot configuration step
// (using the FTC Robot Controller app on the phone).
leftFrontDrive = hardwareMap.get(DcMotor.class, "leftfront_drive");
rightFrontDrive = hardwareMap.get(DcMotor.class, "rightfront_drive");
leftBackDrive = hardwareMap.get(DcMotor.class, "leftback_drive");
rightBackDrive = hardwareMap.get(DcMotor.class, "rightback_drive");
// To drive forward, most robots need the motor on one side to be reversed, because the axles point in opposite directions.
// When run, this OpMode should start both motors driving forward. So adjust these two lines based on your first test drive.
// Note: The settings here assume direct drive on left and right wheels. Gear Reduction or 90 Deg drives may require direction flips
leftFrontDrive.setDirection(DcMotor.Direction.REVERSE);
leftBackDrive.setDirection(DcMotor.Direction.REVERSE);
rightFrontDrive.setDirection(DcMotor.Direction.FORWARD);
rightBackDrive.setDirection(DcMotor.Direction.FORWARD);
if (USE_WEBCAM)
setManualExposure(6, 250); // Use low exposure time to reduce motion blur
// Wait for driver to press start
telemetry.addData("Camera preview on/off", "3 dots, Camera Stream");
telemetry.addData(">", "Touch Play to start OpMode");
telemetry.update();
waitForStart();
while (opModeIsActive())
{
targetFound = false;
desiredTag = null;
// Step through the list of detected tags and look for a matching tag
List<AprilTagDetection> currentDetections = aprilTag.getDetections();
for (AprilTagDetection detection : currentDetections) {
if ((detection.metadata != null)
&& ((DESIRED_TAG_ID < 0) || (detection.id == DESIRED_TAG_ID))) {
targetFound = true;
desiredTag = detection;
break; // don't look any further.
}
}
// Tell the driver what we see, and what to do.
if (targetFound) {
telemetry.addData(">","HOLD Left-Bumper to Drive to Target\n");
telemetry.addData("Target", "ID %d (%s)", desiredTag.id, desiredTag.metadata.name);
telemetry.addData("Range", "%5.1f inches", desiredTag.ftcPose.range);
telemetry.addData("Bearing","%3.0f degrees", desiredTag.ftcPose.bearing);
telemetry.addData("Yaw","%3.0f degrees", desiredTag.ftcPose.yaw);
} else {
telemetry.addData(">","Drive using joystick to find target\n");
}
// If the Left Bumper is being pressed AND we have found the desired target, drive to the target automatically.
if (gamepad1.left_bumper && targetFound) {
// Determine heading, range and Yaw (tag image rotation) error so we can use them to control the robot automatically.
double rangeError = (desiredTag.ftcPose.range - DESIRED_DISTANCE);
double headingError = desiredTag.ftcPose.bearing;
double yawError = desiredTag.ftcPose.yaw;
// Use the speed and turn "gains" to calculate how we want the robot to move.
drive = Range.clip(rangeError * SPEED_GAIN, -MAX_AUTO_SPEED, MAX_AUTO_SPEED);
turn = Range.clip(headingError * TURN_GAIN, -MAX_AUTO_TURN, MAX_AUTO_TURN) ;
strafe = Range.clip(-yawError * STRAFE_GAIN, -MAX_AUTO_STRAFE, MAX_AUTO_STRAFE);
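// (Strafing against the sign of the Yaw error walks the robot toward the tag's
// centerline, so the final approach ends up square-on to the tag.)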
telemetry.addData("Auto","Drive %5.2f, Strafe %5.2f, Turn %5.2f ", drive, strafe, turn);
} else {
// drive using manual POV Joystick mode. Slow things down to make the robot more controllable.
drive = -gamepad1.left_stick_y / 2.0; // Reduce drive rate to 50%.
strafe = -gamepad1.left_stick_x / 2.0; // Reduce strafe rate to 50%.
turn = -gamepad1.right_stick_x / 3.0; // Reduce turn rate to 33%.
telemetry.addData("Manual","Drive %5.2f, Strafe %5.2f, Turn %5.2f ", drive, strafe, turn);
}
telemetry.update();
// Apply desired axes motions to the drivetrain.
moveRobot(drive, strafe, turn);
sleep(10);
}
}
/**
* Move robot according to desired axes motions
* Positive X is forward
* Positive Y is strafe left
* Positive Yaw is counter-clockwise
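* For example, a pure left strafe (x=0, y=+0.5, yaw=0) yields wheel powers
* LF=-0.5, RF=+0.5, LB=+0.5, RB=-0.5 per the mixing equations below.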
*/
public void moveRobot(double x, double y, double yaw) {
// Calculate wheel powers.
double leftFrontPower = x -y -yaw;
double rightFrontPower = x +y +yaw;
double leftBackPower = x +y -yaw;
double rightBackPower = x -y +yaw;
// Normalize wheel powers to be less than 1.0
double max = Math.max(Math.abs(leftFrontPower), Math.abs(rightFrontPower));
max = Math.max(max, Math.abs(leftBackPower));
max = Math.max(max, Math.abs(rightBackPower));
if (max > 1.0) {
leftFrontPower /= max;
rightFrontPower /= max;
leftBackPower /= max;
rightBackPower /= max;
}
// Send powers to the wheels.
leftFrontDrive.setPower(leftFrontPower);
rightFrontDrive.setPower(rightFrontPower);
leftBackDrive.setPower(leftBackPower);
rightBackDrive.setPower(rightBackPower);
}
/**
* Initialize the AprilTag processor.
*/
private void initAprilTag() {
// Create the AprilTag processor by using a builder.
aprilTag = new AprilTagProcessor.Builder().build();
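// The Builder also accepts optional tuning before build(). For example (a sketch only;
// the defaults are sufficient for this sample, and these options exist on AprilTagProcessor.Builder):
//   aprilTag = new AprilTagProcessor.Builder()
//           .setDrawAxes(true)           // draw 3D axes on each detection in the preview
//           .setDrawTagOutline(true)     // outline each detected tag
//           .build();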
// Create the vision portal by using a builder.
if (USE_WEBCAM) {
visionPortal = new VisionPortal.Builder()
.setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"))
.addProcessor(aprilTag)
.build();
} else {
visionPortal = new VisionPortal.Builder()
.setCamera(BuiltinCameraDirection.BACK)
.addProcessor(aprilTag)
.build();
}
}
/*
Manually set the camera gain and exposure.
This can only be called AFTER calling initAprilTag(), and only works for Webcams.
*/
private void setManualExposure(int exposureMS, int gain) {
// Wait for the camera to be open, then use the controls
if (visionPortal == null) {
return;
}
// Make sure camera is streaming before we try to set the exposure controls
if (visionPortal.getCameraState() != VisionPortal.CameraState.STREAMING) {
telemetry.addData("Camera", "Waiting");
telemetry.update();
while (!isStopRequested() && (visionPortal.getCameraState() != VisionPortal.CameraState.STREAMING)) {
sleep(20);
}
telemetry.addData("Camera", "Ready");
telemetry.update();
}
// Set camera controls unless we are stopping.
if (!isStopRequested())
{
ExposureControl exposureControl = visionPortal.getCameraControl(ExposureControl.class);
if (exposureControl.getMode() != ExposureControl.Mode.Manual) {
exposureControl.setMode(ExposureControl.Mode.Manual);
sleep(50);
}
exposureControl.setExposure((long)exposureMS, TimeUnit.MILLISECONDS);
sleep(20);
GainControl gainControl = visionPortal.getCameraControl(GainControl.class);
gainControl.setGain(gain);
sleep(20);
}
}
}

View File

@ -0,0 +1,271 @@
/* Copyright (c) 2023 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.robotcontroller.external.samples;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.util.Range;
import org.firstinspires.ftc.robotcore.external.hardware.camera.BuiltinCameraDirection;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.robotcore.external.hardware.camera.controls.ExposureControl;
import org.firstinspires.ftc.robotcore.external.hardware.camera.controls.GainControl;
import org.firstinspires.ftc.vision.VisionPortal;
import org.firstinspires.ftc.vision.apriltag.AprilTagDetection;
import org.firstinspires.ftc.vision.apriltag.AprilTagProcessor;
import java.util.List;
import java.util.concurrent.TimeUnit;
/**
* This OpMode illustrates using a camera to locate and drive towards a specific AprilTag.
* The code assumes a basic two-wheel (Tank) Robot Drivetrain
*
* The driving goal is to rotate to keep the tag centered in the camera, while driving towards the tag to achieve the desired distance.
* To reduce any motion blur (which will interrupt the detection process) the Camera exposure is reduced to a very low value (6 ms).
* You can determine the best exposure and gain values by using the ConceptAprilTagOptimizeExposure OpMode in this Samples folder.
*
* The code assumes a Robot Configuration with motors named left_drive and right_drive.
* The motor directions must be set so a positive power goes forward on both wheels;
* This sample assumes that the default AprilTag Library (usually for the current season) is being loaded by default
* so you should choose to approach a valid tag ID (usually starting at 0)
*
* Under manual control, the left stick will move forward/back, and the right stick will rotate the robot.
* This is called POV Joystick mode, different from Tank Drive (where each joystick controls a wheel).
*
* Manually drive the robot until it displays Target data on the Driver Station.
* Press and hold the *Left Bumper* to enable the automatic "Drive to target" mode.
* Release the Left Bumper to return to manual driving mode.
*
* Under "Drive To Target" mode, the robot has two goals:
* 1) Turn the robot to always keep the Tag centered on the camera frame. (Use the Target Bearing to turn the robot.)
* 2) Drive towards the Tag to get to the desired distance. (Use Tag Range to drive the robot forward/backward)
*
* Use DESIRED_DISTANCE to set how close you want the robot to get to the target.
* Speed and Turn sensitivity can be adjusted using the SPEED_GAIN and TURN_GAIN constants.
*
* Use Android Studio to Copy this Class, and Paste it into the TeamCode/src/main/java/org/firstinspires/ftc/teamcode folder.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
*
*/
@TeleOp(name="Tank Drive To AprilTag", group = "Concept")
@Disabled
public class RobotAutoDriveToAprilTagTank extends LinearOpMode
{
// Adjust these numbers to suit your robot.
final double DESIRED_DISTANCE = 12.0; // this is how close the camera should get to the target (inches)
// Set the GAIN constants to control the relationship between the measured position error, and how much power is
// applied to the drive motors to correct the error.
// Drive = Error * Gain.  Make these values smaller for smoother control, or larger for a more aggressive response.
final double SPEED_GAIN = 0.02 ; // Speed Control "Gain". eg: Ramp up to 50% power at a 25 inch error. (0.50 / 25.0)
final double TURN_GAIN = 0.01 ; // Turn Control "Gain". eg: Ramp up to 25% power at a 25 degree error. (0.25 / 25.0)
final double MAX_AUTO_SPEED = 0.5; // Clip the approach speed to this max value (adjust for your robot)
final double MAX_AUTO_TURN = 0.25; // Clip the turn speed to this max value (adjust for your robot)
private DcMotor leftDrive = null; // Used to control the left drive wheel
private DcMotor rightDrive = null; // Used to control the right drive wheel
private static final boolean USE_WEBCAM = true; // Set true to use a webcam, or false for a phone camera
private static final int DESIRED_TAG_ID = 0; // Choose the tag you want to approach or set to -1 for ANY tag.
private VisionPortal visionPortal; // Used to manage the video source.
private AprilTagProcessor aprilTag; // Used for managing the AprilTag detection process.
private AprilTagDetection desiredTag = null; // Used to hold the data for a detected AprilTag
@Override public void runOpMode()
{
boolean targetFound = false; // Set to true when an AprilTag target is detected
double drive = 0; // Desired forward power/speed (-1 to +1) +ve is forward
double turn = 0; // Desired turning power/speed (-1 to +1) +ve is CounterClockwise
// Initialize the Apriltag Detection process
initAprilTag();
// Initialize the hardware variables. Note that the strings used here as parameters
// to 'get' must match the names assigned during the robot configuration step
// (using the FTC Robot Controller app on the phone).
leftDrive = hardwareMap.get(DcMotor.class, "left_drive");
rightDrive = hardwareMap.get(DcMotor.class, "right_drive");
// To drive forward, most robots need the motor on one side to be reversed because the axles point in opposite directions.
// When run, this OpMode should start both motors driving forward. So adjust these two lines based on your first test drive.
// Note: The settings here assume direct drive on left and right wheels. Single Gear Reduction or 90 Deg drives may require direction flips
leftDrive.setDirection(DcMotor.Direction.REVERSE);
rightDrive.setDirection(DcMotor.Direction.FORWARD);
if (USE_WEBCAM)
setManualExposure(6, 250); // Use low exposure time to reduce motion blur
// Wait for the driver to press Start
telemetry.addData("Camera preview on/off", "3 dots, Camera Stream");
telemetry.addData(">", "Touch Play to start OpMode");
telemetry.update();
waitForStart();
while (opModeIsActive())
{
targetFound = false;
desiredTag = null;
// Step through the list of detected tags and look for a matching tag
List<AprilTagDetection> currentDetections = aprilTag.getDetections();
for (AprilTagDetection detection : currentDetections) {
if ((detection.metadata != null)
&& ((DESIRED_TAG_ID < 0) || (detection.id == DESIRED_TAG_ID))) {
targetFound = true;
desiredTag = detection;
break; // don't look any further.
}
}
// Tell the driver what we see, and what to do.
if (targetFound) {
telemetry.addData(">","HOLD Left-Bumper to Drive to Target\n");
telemetry.addData("Target", "ID %d (%s)", desiredTag.id, desiredTag.metadata.name);
telemetry.addData("Range", "%5.1f inches", desiredTag.ftcPose.range);
telemetry.addData("Bearing","%3.0f degrees", desiredTag.ftcPose.bearing);
} else {
telemetry.addData(">","Drive using joystick to find target\n");
}
// If the Left Bumper is being pressed AND we have found the desired target, drive to the target automatically.
if (gamepad1.left_bumper && targetFound) {
// Determine heading and range error so we can use them to control the robot automatically.
double rangeError = (desiredTag.ftcPose.range - DESIRED_DISTANCE);
double headingError = desiredTag.ftcPose.bearing;
// Use the speed and turn "gains" to calculate how we want the robot to move. Clip it to the maximum
drive = Range.clip(rangeError * SPEED_GAIN, -MAX_AUTO_SPEED, MAX_AUTO_SPEED);
turn = Range.clip(headingError * TURN_GAIN, -MAX_AUTO_TURN, MAX_AUTO_TURN) ;
telemetry.addData("Auto","Drive %5.2f, Turn %5.2f", drive, turn);
} else {
// drive using manual POV Joystick mode.
drive = -gamepad1.left_stick_y / 2.0; // Reduce drive rate to 50%.
turn = -gamepad1.right_stick_x / 4.0; // Reduce turn rate to 25%.
telemetry.addData("Manual","Drive %5.2f, Turn %5.2f", drive, turn);
}
telemetry.update();
// Apply desired axes motions to the drivetrain.
moveRobot(drive, turn);
sleep(10);
}
}
/**
* Move robot according to desired axes motions
* Positive X is forward
* Positive Yaw is counter-clockwise
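* For example, x=+0.3, yaw=+0.1 yields left = 0.2 and right = 0.4, a gentle
* counter-clockwise (left) arc per the mixing equations below.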
*/
public void moveRobot(double x, double yaw) {
// Calculate left and right wheel powers.
double leftPower = x - yaw;
double rightPower = x + yaw;
// Normalize wheel powers to be less than 1.0
double max = Math.max(Math.abs(leftPower), Math.abs(rightPower));
if (max > 1.0) {
leftPower /= max;
rightPower /= max;
}
// Send powers to the wheels.
leftDrive.setPower(leftPower);
rightDrive.setPower(rightPower);
}
/**
* Initialize the AprilTag processor.
*/
private void initAprilTag() {
// Create the AprilTag processor by using a builder.
aprilTag = new AprilTagProcessor.Builder().build();
// Create the vision portal by using a builder.
if (USE_WEBCAM) {
visionPortal = new VisionPortal.Builder()
.setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"))
.addProcessor(aprilTag)
.build();
} else {
visionPortal = new VisionPortal.Builder()
.setCamera(BuiltinCameraDirection.BACK)
.addProcessor(aprilTag)
.build();
}
}
/*
Manually set the camera gain and exposure.
This can only be called AFTER calling initAprilTag(), and only works for Webcams.
*/
private void setManualExposure(int exposureMS, int gain) {
// Wait for the camera to be open, then use the controls
if (visionPortal == null) {
return;
}
// Make sure camera is streaming before we try to set the exposure controls
if (visionPortal.getCameraState() != VisionPortal.CameraState.STREAMING) {
telemetry.addData("Camera", "Waiting");
telemetry.update();
while (!isStopRequested() && (visionPortal.getCameraState() != VisionPortal.CameraState.STREAMING)) {
sleep(20);
}
telemetry.addData("Camera", "Ready");
telemetry.update();
}
// Set camera controls unless we are stopping.
if (!isStopRequested())
{
ExposureControl exposureControl = visionPortal.getCameraControl(ExposureControl.class);
if (exposureControl.getMode() != ExposureControl.Mode.Manual) {
exposureControl.setMode(ExposureControl.Mode.Manual);
sleep(50);
}
exposureControl.setExposure((long)exposureMS, TimeUnit.MILLISECONDS);
sleep(20);
GainControl gainControl = visionPortal.getCameraControl(GainControl.class);
gainControl.setGain(gain);
sleep(20);
telemetry.addData("Camera", "Ready");
telemetry.update();
}
}
}

View File

@ -49,13 +49,16 @@ import java.util.Locale;
/**
* {@link SensorBNO055IMU} gives a short demo on how to use the BNO055 Inertial Motion Unit (IMU) from AdaFruit.
*
* Note: this is a Legacy example that will not work with newer Control/Expansion Hubs that use a different IMU.
* Please use the new SensorIMUOrthogonal or SensorIMUNonOrthogonal samples for a more universal IMU interface.
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list
*
* @see <a href="http://www.adafruit.com/products/2472">Adafruit IMU</a>
*/
@TeleOp(name = "Sensor: BNO055 IMU", group = "Sensor")
@Disabled // Comment this out to add to the opmode list
public class SensorBNO055IMU extends LinearOpMode
{
//----------------------------------------------------------------------------------------------

View File

@ -50,6 +50,9 @@ import java.util.Locale;
* {@link SensorBNO055IMUCalibration} calibrates the IMU accelerometer per
* "Section 3.11 Calibration" of the BNO055 specification.
*
* Note: this is a Legacy example that will not work with newer Control/Expansion Hubs that use a different IMU.
* Please use the new SensorIMUOrthogonal or SensorIMUNonOrthogonal samples for a more universal IMU interface.
*
* <p>Manual calibration of the IMU is definitely NOT necessary: except for the magnetometer
* (which is not used by the default {@link BNO055IMU.SensorMode#IMU
* SensorMode#IMU}), the BNO055 is internally self-calibrating and thus can be very successfully

View File

@ -0,0 +1,183 @@
/* Copyright (c) 2022 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.robotcontroller.external.samples;
import static com.qualcomm.hardware.rev.RevHubOrientationOnRobot.xyzOrientation;
import com.qualcomm.hardware.rev.RevHubOrientationOnRobot;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.IMU;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.robotcore.external.navigation.AngularVelocity;
import org.firstinspires.ftc.robotcore.external.navigation.Orientation;
import org.firstinspires.ftc.robotcore.external.navigation.YawPitchRollAngles;
/**
* {@link SensorIMUNonOrthogonal} shows how to use the new universal {@link IMU} interface. This
* interface may be used with the BNO055 IMU or the BHI260 IMU. It assumes that an IMU is configured
* on the robot with the name "imu".
* <p>
* The sample will display the current Yaw, Pitch and Roll of the robot.<br>
* With the correct orientation parameters selected, pitch/roll/yaw should act as follows:
* <p>
* Pitch value should INCREASE as the robot is tipped UP at the front. (Rotation about X) <br>
* Roll value should INCREASE as the robot is tipped UP at the left side. (Rotation about Y) <br>
* Yaw value should INCREASE as the robot is rotated Counter Clockwise. (Rotation about Z) <br>
* <p>
* The yaw can be reset (to zero) by pressing the Y button on the gamepad (Triangle on a PS4 controller)
* <p>
* This specific sample DOES NOT assume that the Hub is mounted on one of the three orthogonal
* planes (X/Y, X/Z or Y/Z) OR that the Hub has only been rotated in a range of 90 degree increments.
* <p>
* Note: if your Hub is mounted Orthogonally (on an orthogonal surface, angled at some multiple of
* 90 Degrees) then you should use the simpler SensorIMUOrthogonal sample in this folder.
* <p>
* But... If your Hub is mounted Non-Orthogonally, you must specify one or more rotational angles
* that transform a "Default" Hub orientation into your desired orientation. That is what is
* illustrated here.
* <p>
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
* <p>
* Finally, edit this OpMode to use at least one angle around an axis to orient your Hub.
*/
@TeleOp(name = "Sensor: IMU Non-Orthogonal", group = "Sensor")
@Disabled // Comment this out to add to the OpMode list
public class SensorIMUNonOrthogonal extends LinearOpMode
{
// The IMU sensor object
IMU imu;
//----------------------------------------------------------------------------------------------
// Main logic
//----------------------------------------------------------------------------------------------
@Override public void runOpMode() throws InterruptedException {
// Retrieve and initialize the IMU.
// This sample expects the IMU to be in a REV Hub and named "imu".
imu = hardwareMap.get(IMU.class, "imu");
/* Define how the hub is mounted to the robot to get the correct Yaw, Pitch and Roll values.
*
* You can apply up to three axis rotations to orient your Hub according to how it's mounted on the robot.
*
* The starting point for these rotations is the "Default" Hub orientation, which is:
* 1) Hub laying flat on a horizontal surface, with the Printed Logo facing UP
* 2) Rotated such that the USB ports are facing forward on the robot.
*
* The order that the rotations are performed matters, so this sample shows doing them in the order X, Y, then Z.
* For specifying non-orthogonal hub mounting orientations, we must temporarily use axes
* defined relative to the Hub itself, instead of the usual Robot Coordinate System axes
* used for the results the IMU gives us. In the starting orientation, the Hub axes are
* aligned with the Robot Coordinate System:
*
* X Axis: Starting at Center of Hub, pointing out towards I2C connectors
* Y Axis: Starting at Center of Hub, pointing out towards USB connectors
* Z Axis: Starting at Center of Hub, pointing Up through LOGO
*
* Positive rotation is defined by right-hand rule with thumb pointing in +ve direction on axis.
*
* Some examples.
*
* ----------------------------------------------------------------------------------------------------------------------------------
* Example A) Assume that the hub is mounted on a sloped plate at the back of the robot, with the USB ports coming out the top of the hub.
* The plate is tilted UP 60 degrees from horizontal.
*
* To get the "Default" hub into this configuration you would just need a single rotation.
* 1) Rotate the Hub +60 degrees around the X axis to tilt up the front edge.
* 2) No rotation around the Y or Z axes.
*
* So the X,Y,Z rotations would be 60,0,0
*
* ----------------------------------------------------------------------------------------------------------------------------------
* Example B) Assume that the hub is laying flat on the chassis, but it has been twisted 30 degrees towards the right front wheel to make
* the USB cable accessible.
*
* To get the "Default" hub into this configuration you would just need a single rotation, but around a different axis.
*    1) No rotation around the X or Y axes.
*    2) Rotate the Hub -30 degrees (Clockwise) around the Z axis, since a positive angle would be Counter Clockwise.
*
* So the X,Y,Z rotations would be 0,0,-30
*
* ----------------------------------------------------------------------------------------------------------------------------------
* Example C) Assume that the hub is mounted on a vertical plate on the right side of the robot, with the Logo facing out, and the
* Hub rotated so that the USB ports are facing down 30 degrees towards the back wheels of the robot.
*
* To get the "Default" hub into this configuration will require several rotations.
* 1) Rotate the hub +90 degrees around the X axis to get it standing upright with the logo pointing backwards on the robot
* 2) Next, rotate the hub +90 around the Y axis to get it facing to the right.
* 3) Finally rotate the hub +120 degrees around the Z axis to take the USB ports from vertical to sloping down 30 degrees and
* facing towards the back of the robot.
*
* So the X,Y,Z rotations would be 90,90,120
*/
// The next three lines define the desired axis rotations.
// To Do: EDIT these values to match YOUR mounting configuration.
double xRotation = 0; // enter the desired X rotation angle here.
double yRotation = 0; // enter the desired Y rotation angle here.
double zRotation = 0; // enter the desired Z rotation angle here.
Orientation hubRotation = xyzOrientation(xRotation, yRotation, zRotation);
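// For instance (illustration only): Example C above corresponds to
// xRotation = 90, yRotation = 90, zRotation = 120.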
// Now initialize the IMU with this mounting orientation
RevHubOrientationOnRobot orientationOnRobot = new RevHubOrientationOnRobot(hubRotation);
imu.initialize(new IMU.Parameters(orientationOnRobot));
// Loop and update the dashboard
while (!isStopRequested()) {
telemetry.addData("Hub orientation", "X=%.1f, Y=%.1f, Z=%.1f \n", xRotation, yRotation, zRotation);
// Check to see if heading reset is requested
if (gamepad1.y) {
telemetry.addData("Yaw", "Resetting\n");
imu.resetYaw();
} else {
telemetry.addData("Yaw", "Press Y (triangle) on Gamepad to reset\n");
}
// Retrieve Rotational Angles and Velocities
YawPitchRollAngles orientation = imu.getRobotYawPitchRollAngles();
AngularVelocity angularVelocity = imu.getRobotAngularVelocity(AngleUnit.DEGREES);
telemetry.addData("Yaw (Z)", "%.2f Deg. (Heading)", orientation.getYaw(AngleUnit.DEGREES));
telemetry.addData("Pitch (X)", "%.2f Deg.", orientation.getPitch(AngleUnit.DEGREES));
telemetry.addData("Roll (Y)", "%.2f Deg.\n", orientation.getRoll(AngleUnit.DEGREES));
telemetry.addData("Yaw (Z) velocity", "%.2f Deg/Sec", angularVelocity.zRotationRate);
telemetry.addData("Pitch (X) velocity", "%.2f Deg/Sec", angularVelocity.xRotationRate);
telemetry.addData("Roll (Y) velocity", "%.2f Deg/Sec", angularVelocity.yRotationRate);
telemetry.update();
}
}
}

View File

@ -0,0 +1,145 @@
/* Copyright (c) 2022 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.robotcontroller.external.samples;
import com.qualcomm.hardware.rev.RevHubOrientationOnRobot;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.IMU;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.robotcore.external.navigation.AngularVelocity;
import org.firstinspires.ftc.robotcore.external.navigation.YawPitchRollAngles;
/**
* {@link SensorIMUOrthogonal} shows how to use the new universal {@link IMU} interface. This
* interface may be used with the BNO055 IMU or the BHI260 IMU. It assumes that an IMU is configured
* on the robot with the name "imu".
* <p>
* The sample will display the current Yaw, Pitch and Roll of the robot.<br>
* With the correct orientation parameters selected, pitch/roll/yaw should act as follows:
* <p>
* Pitch value should INCREASE as the robot is tipped UP at the front. (Rotation about X) <br>
* Roll value should INCREASE as the robot is tipped UP at the left side. (Rotation about Y) <br>
* Yaw value should INCREASE as the robot is rotated Counter Clockwise. (Rotation about Z) <br>
* <p>
* The yaw can be reset (to zero) by pressing the Y button on the gamepad (Triangle on a PS4 controller)
* <p>
* This specific sample assumes that the Hub is mounted on one of the three orthogonal planes
* (X/Y, X/Z or Y/Z) and that the Hub has only been rotated in a range of 90 degree increments.
* <p>
* Note: if your Hub is mounted on a surface angled at some non-90 Degree multiple (like 30), look at
* the alternative SensorIMUNonOrthogonal sample in this folder.
* <p>
* This "Orthogonal" requirement means that:
* <p>
* 1) The Logo printed on the top of the Hub can ONLY be pointing in one of six directions:
* FORWARD, BACKWARD, UP, DOWN, LEFT and RIGHT.
* <p>
* 2) The USB ports can only be pointing in one of the same six directions:<br>
* FORWARD, BACKWARD, UP, DOWN, LEFT and RIGHT.
* <p>
* So, to fully define how your Hub is mounted to the robot, you simply specify:<br>
* logoFacingDirection<br>
* usbFacingDirection
* <p>
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
* <p>
* Finally, choose the two correct parameters to define how your Hub is mounted and edit this OpMode
* to use those parameters.
*/
@TeleOp(name = "Sensor: IMU Orthogonal", group = "Sensor")
@Disabled // Comment this out to add to the OpMode list
public class SensorIMUOrthogonal extends LinearOpMode
{
// The IMU sensor object
IMU imu;
//----------------------------------------------------------------------------------------------
// Main logic
//----------------------------------------------------------------------------------------------
@Override public void runOpMode() throws InterruptedException {
// Retrieve and initialize the IMU.
// This sample expects the IMU to be in a REV Hub and named "imu".
imu = hardwareMap.get(IMU.class, "imu");
/* Define how the hub is mounted on the robot to get the correct Yaw, Pitch and Roll values.
*
* Two input parameters are required to fully specify the Orientation.
* The first parameter specifies the direction the printed logo on the Hub is pointing.
* The second parameter specifies the direction the USB connector on the Hub is pointing.
* All directions are relative to the robot, and left/right is as-viewed from behind the robot.
*/
/* The next two lines define Hub orientation.
* The Default Orientation (shown) is when a hub is mounted horizontally with the printed logo pointing UP and the USB port pointing FORWARD.
*
* To Do: EDIT these two lines to match YOUR mounting configuration.
*/
RevHubOrientationOnRobot.LogoFacingDirection logoDirection = RevHubOrientationOnRobot.LogoFacingDirection.UP;
RevHubOrientationOnRobot.UsbFacingDirection usbDirection = RevHubOrientationOnRobot.UsbFacingDirection.FORWARD;
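// For example (illustration only, not this sample's default): a Hub mounted vertically on the
// robot's left side, with the logo facing out and the USB ports pointing up, would use:
//   logoDirection = RevHubOrientationOnRobot.LogoFacingDirection.LEFT;
//   usbDirection  = RevHubOrientationOnRobot.UsbFacingDirection.UP;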
RevHubOrientationOnRobot orientationOnRobot = new RevHubOrientationOnRobot(logoDirection, usbDirection);
// Now initialize the IMU with this mounting orientation
// Note: if you choose two conflicting directions, this initialization will cause a code exception.
imu.initialize(new IMU.Parameters(orientationOnRobot));
// Loop and update the dashboard
while (!isStopRequested()) {
telemetry.addData("Hub orientation", "Logo=%s USB=%s\n ", logoDirection, usbDirection);
// Check to see if heading reset is requested
if (gamepad1.y) {
telemetry.addData("Yaw", "Resetting\n");
imu.resetYaw();
} else {
telemetry.addData("Yaw", "Press Y (triangle) on Gamepad to reset\n");
}
// Retrieve Rotational Angles and Velocities
YawPitchRollAngles orientation = imu.getRobotYawPitchRollAngles();
AngularVelocity angularVelocity = imu.getRobotAngularVelocity(AngleUnit.DEGREES);
telemetry.addData("Yaw (Z)", "%.2f Deg. (Heading)", orientation.getYaw(AngleUnit.DEGREES));
telemetry.addData("Pitch (X)", "%.2f Deg.", orientation.getPitch(AngleUnit.DEGREES));
telemetry.addData("Roll (Y)", "%.2f Deg.\n", orientation.getRoll(AngleUnit.DEGREES));
telemetry.addData("Yaw (Z) velocity", "%.2f Deg/Sec", angularVelocity.zRotationRate);
telemetry.addData("Pitch (X) velocity", "%.2f Deg/Sec", angularVelocity.xRotationRate);
telemetry.addData("Roll (Y) velocity", "%.2f Deg/Sec", angularVelocity.yRotationRate);
telemetry.update();
}
}
}

View File

@ -0,0 +1,129 @@
/*
* Copyright (c) 2023 FIRST
*
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to
* endorse or promote products derived from this software without specific prior
* written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
* TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
* THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.robotcontroller.external.samples;
import android.util.Size;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import org.firstinspires.ftc.robotcore.external.hardware.camera.BuiltinCameraDirection;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.vision.VisionPortal;
import java.util.Locale;
/**
* This Utility OpMode helps calibrate a webcam or RC phone camera, useful for AprilTag pose estimation
* with the FTC VisionPortal. It captures a camera frame (image) and stores it on the Robot Controller
* (Control Hub or RC phone), with each press of the gamepad button X (or Square).
* Full calibration instructions are here:
*
* https://ftc-docs.firstinspires.org/camera-calibration
*
* In Android Studio, copy this class into your "teamcode" folder with a new name.
* Remove or comment out the @Disabled line to add this file to the Driver Station OpMode list.
*
* In OnBot Java, use "Add File" to add this file from the list of Samples.
*/
@TeleOp(name = "Utility: Camera Frame Capture", group = "Utility")
@Disabled
public class UtilityCameraFrameCapture extends LinearOpMode
{
/*
* EDIT THESE PARAMETERS AS NEEDED
*/
final boolean USING_WEBCAM = false;
final BuiltinCameraDirection INTERNAL_CAM_DIR = BuiltinCameraDirection.BACK;
final int RESOLUTION_WIDTH = 640;
final int RESOLUTION_HEIGHT = 480;
// Internal state
boolean lastX;
int frameCount;
long capReqTime;
@Override
public void runOpMode()
{
VisionPortal portal;
if (USING_WEBCAM)
{
portal = new VisionPortal.Builder()
.setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"))
.setCameraResolution(new Size(RESOLUTION_WIDTH, RESOLUTION_HEIGHT))
.build();
}
else
{
portal = new VisionPortal.Builder()
.setCamera(INTERNAL_CAM_DIR)
.setCameraResolution(new Size(RESOLUTION_WIDTH, RESOLUTION_HEIGHT))
.build();
}
while (!isStopRequested())
{
boolean x = gamepad1.x;
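// Rising-edge detection: capture exactly one frame per press of X, rather than one per loop iteration.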
if (x && !lastX)
{
portal.saveNextFrameRaw(String.format(Locale.US, "CameraFrameCapture-%06d", frameCount++));
capReqTime = System.currentTimeMillis();
}
lastX = x;
telemetry.addLine("######## Camera Capture Utility ########");
telemetry.addLine(String.format(Locale.US, " > Resolution: %dx%d", RESOLUTION_WIDTH, RESOLUTION_HEIGHT));
telemetry.addLine(" > Press X (or Square) to capture a frame");
telemetry.addData(" > Camera Status", portal.getCameraState());
if (capReqTime != 0)
{
telemetry.addLine("\nCaptured Frame!");
}
if (capReqTime != 0 && System.currentTimeMillis() - capReqTime > 1000)
{
capReqTime = 0;
}
telemetry.update();
}
}
}

View File

@ -625,8 +625,8 @@ public class FtcRobotControllerActivity extends Activity
}
/**
* Updates the orientation of monitorContainer (which contains cameraMonitorView and
* tfodMonitorView) based on the given configuration. Makes the children split the space.
* Updates the orientation of monitorContainer (which contains cameraMonitorView)
* based on the given configuration. Makes the children split the space.
*/
private void updateMonitorLayout(Configuration configuration) {
LinearLayout monitorContainer = (LinearLayout) findViewById(R.id.monitorContainer);

View File

@ -140,13 +140,6 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
android:layout_weight="1"
android:orientation="vertical"
/>
<FrameLayout
android:id="@+id/tfodMonitorViewId"
android:visibility="gone"
android:layout_width="match_parent"
android:layout_height="0dp"
android:layout_weight="1"
/>
</LinearLayout>

155
README.md
View File

@ -1,16 +1,21 @@
## NOTICE
This repository contains the public FTC SDK for the Freight Frenzy (2021-2022) competition season.
This repository contains the public FTC SDK for the POWERPLAY (2022-2023) competition season.
## Welcome!
This GitHub repository contains the source code that is used to build an Android app to control a *FIRST* Tech Challenge competition robot. To use this SDK, download/clone the entire project to your local computer.
## Requirements
To use this Android Studio project, you will need Android Studio 2021.2 (codename Chipmunk) or later.
To program your robot in Blocks or OnBot Java, you do not need Android Studio.
## Getting Started
If you are new to robotics or new to the *FIRST* Tech Challenge, then you should consider reviewing the [FTC Blocks Tutorial](https://github.com/FIRST-Tech-Challenge/FtcRobotController/wiki/Blocks-Tutorial) to get familiar with how to use the control system:
If you are new to robotics or new to the *FIRST* Tech Challenge, then you should consider reviewing the [FTC Blocks Tutorial](https://ftc-docs.firstinspires.org/programming_resources/blocks/Blocks-Tutorial.html) to get familiar with how to use the control system:
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;[FTC Blocks Online Tutorial](https://github.com/FIRST-Tech-Challenge/FtcRobotController/wiki/Blocks-Tutorial)
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;[FTC Blocks Online Tutorial](https://ftc-docs.firstinspires.org/programming_resources/blocks/Blocks-Tutorial.html)
Even if you are an advanced Java programmer, it is helpful to start with the [FTC Blocks tutorial](https://github.com/FIRST-Tech-Challenge/FtcRobotController/wiki/Blocks-Tutorial), and then migrate to the [OnBot Java Tool](https://github.com/FIRST-Tech-Challenge/FtcRobotController/wiki/OnBot-Java-Tutorial) or to [Android Studio](https://github.com/FIRST-Tech-Challenge/FtcRobotController/wiki/Android-Studio-Tutorial) afterwards.
Even if you are an advanced Java programmer, it is helpful to start with the [FTC Blocks tutorial](https://ftc-docs.firstinspires.org/programming_resources/blocks/Blocks-Tutorial.html), and then migrate to the [OnBot Java Tool](https://ftc-docs.firstinspires.org/programming_resources/onbot_java/OnBot-Java-Tutorial.html) or to [Android Studio](https://ftc-docs.firstinspires.org/programming_resources/android_studio_java/Android-Studio-Tutorial.html) afterwards.
## Downloading the Project
If you are an Android Studio programmer, there are several ways to download this repo. Note that if you use the Blocks or OnBot Java Tool to program your robot, then you do not need to download this repository.
@ -31,7 +36,7 @@ Once you have downloaded and uncompressed (if needed) your folder, you can use A
### User Documentation and Tutorials
*FIRST* maintains online documentation with information and tutorials on how to use the *FIRST* Tech Challenge software and robot control system. You can access this documentation using the following link:
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;[FtcRobotController Online Documentation](https://github.com/FIRST-Tech-Challenge/FtcRobotController/wiki)
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;[FIRST Tech Challenge Documentation](https://ftc-docs.firstinspires.org/index.html)
Note that the online documentation is an "evergreen" document that is constantly being updated and edited. It contains the most current information about the *FIRST* Tech Challenge software and control system.
@ -41,9 +46,9 @@ The Javadoc reference documentation for the FTC SDK is now available online. Cl
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;[FTC Javadoc Documentation](https://javadoc.io/doc/org.firstinspires.ftc)
### Online User Forum
For technical questions regarding the Control System or the FTC SDK, please visit the FTC Technology forum:
For technical questions regarding the Control System or the FTC SDK, please visit the FIRST Tech Challenge Community site:
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;[FTC Technology Forum](https://ftcforum.firstinspires.org/forum/ftc-technology)
&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;[FIRST Tech Challenge Community](https://ftc-community.firstinspires.org/)
### Sample OpModes
This project contains a large selection of Sample OpModes (robot code examples) which can be cut and pasted into your /teamcode folder to be used as-is, or modified to suit your team's needs.
@ -54,6 +59,138 @@ The readme.md file located in the [/TeamCode/src/main/java/org/firstinspires/ftc
# Release Information
## Version 8.2 (20230707-131020)
### Breaking changes
* Non-linear (iterative) Op Modes are no longer allowed to manipulate actuators in their `stop()` method. Attempts to do so will be ignored and logged.
* When an Op Mode attempts to illegally manipulate an actuator, the Robot Controller will print a log message
including the text `CANCELLED_FOR_SAFETY`.
* Additionally, LinearOpModes are no longer able to regain the ability to manipulate actuators by removing their
thread's interrupt or using another thread.
* Removes support for Android version 6.0 (Marshmallow). The minSdkVersion is now 24.
* Increases the Robocol version.
* This means an 8.2 or later Robot Controller or Driver Station will not be able to communicate with an 8.1 or earlier Driver Station or Robot Controller.
* If you forget to update both apps at the same time, an error message will be shown explaining which app is older and should be updated.
* FTC_FieldCoordinateSystemDefinition.pdf has been moved. It is still in the git history, but has been removed from the git snapshot corresponding with the 8.2 tag. The official version now lives at [Field Coordinate System](https://ftc-docs.firstinspires.org/field-coordinate-system).
* `LynxUsbDevice.addConfiguredModule()` and `LynxUsbDevice.getConfiguredModule()` have been replaced with `LynxUsbDevice.getOrAddModule()`.
### New features
* Adds new `VisionPortal` API for computer vision
* **This API may be subject to change for final kickoff release!**
* Several new samples added.
* Adds support for detecting AprilTags.
* `VisionPortal` is the new entry point for both AprilTag and TFOD processing.
* Vuforia will be removed in a future release.
* Updated TensorFlow dependencies.
* Added support for webcam camera controls to blocks.
* Previous blocks for Vuforia and TensorFlow Object Detection are obsolete.
* Related documentation for associated technologies can be found at
* [AprilTag Introduction](https://ftc-docs.firstinspires.org/apriltag-intro)
* [AprilTag SDK Guide](https://ftc-docs.firstinspires.org/apriltag-sdk)
* [AprilTag Detection Values](https://ftc-docs.firstinspires.org/apriltag-detection-values)
* [AprilTag Test Images](https://ftc-docs.firstinspires.org/apriltag-test-images)
* [Camera Calibration](https://ftc-docs.firstinspires.org/camera-calibration)
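A minimal `VisionPortal` AprilTag sketch (an illustration that assumes a webcam configured as "Webcam 1"; see the bundled samples for authoritative usage):
```java
AprilTagProcessor aprilTag = AprilTagProcessor.easyCreateWithDefaults();
VisionPortal portal = VisionPortal.easyCreateWithDefaults(
        hardwareMap.get(WebcamName.class, "Webcam 1"), aprilTag);
```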
* Adds Driver Station support for Logitech Dual Action and Sony PS5 DualSense gamepads.
* This **does not** include support for the Sony PS5 DualSense Edge gamepad.
* Always refer to Game Manual 1 to determine gamepad legality in competition.
* Adds support for MJPEG payload streaming to UVC driver (external JPEG decompression routine required for use).
* Shows a hint on the Driver Station UI about how to bind a gamepad when buttons are pressed or the sticks are moved on an unbound gamepad.
* Adds option for fullscreening "Camera Stream" on Driver Station.
* OnBotJava source code is automatically saved as a ZIP file on every build with a rolling window of the last 30 builds kept; allows recovering source code from previous builds if code is accidentally deleted or corrupted.
* Adds support for changing the addresses of Expansion Hubs that are not connected directly via USB.
* The Expansion Hub Address Change screen now has an Apply button that changes the addresses without leaving the screen.
* Addresses that are assigned to other hubs connected to the same USB connection or Control Hub are no longer able to be selected.
* Increases maximum size of Blocks inline comments to 100 characters
* Saves position of open Blocks comment balloons
* Adds new AprilTag Driving samples: RobotDriveToAprilTagTank & RobotDriveToAprilTagOmni
* Adds Sample to illustrate optimizing camera exposure for AprilTags: ConceptAprilTagOptimizeExposure
### Bug Fixes
* Corrects inspection screen to report app version using the SDK version defined in the libraries instead of the version specified in `AndroidManifest.xml`. This corrects the case where the app could show matching versions numbers to the user but still state that the versions did not match.
* If the version specified in `AndroidManifest.xml` does not match the SDK version, an SDK version entry will be displayed on the Manage webpage.
* Fixes no error being displayed when saving a configuration file with duplicate names from the Driver Station.
* Fixes a deadlock in the UVC driver which manifested in https://github.com/OpenFTC/EasyOpenCV/issues/57.
* Fixes a deadlock in the UVC driver that could occur when hot-plugging cameras.
* Fixes UVC driver compatibility with Arducam OV9281 global shutter camera.
* Fixes Emergency Stop condition when an OnBotJava build with duplicate Op Mode names occurs.
* Fixes known causes of "Attempted use of a closed LynxModule instance" logspam.
* Fixes the visual identification LED pattern when configuring Expansion Hubs connected via RS-485.
## Version 8.1.1 (20221201-150726)
This is a bug fix only release to address the following four issues.
* [Issue #492](https://github.com/FIRST-Tech-Challenge/FtcRobotController/issues/492) - Can't create new blocks opmodes.
* [Issue #495](https://github.com/FIRST-Tech-Challenge/FtcRobotController/issues/495) - Remove the final modifier from the OpMode's Telemetry object.
* [Issue #500](https://github.com/FIRST-Tech-Challenge/FtcRobotController/issues/500) - Some devices cannot be configured when the Driver Station app has been updated to 8.1
* Updating either the Robot Controller app or the Driver Station app to 8.1.1 or later will fix this issue.
* The Modern Robotics touch sensor was configurable as a Digital Device. It can only be used as an Analog Device.
## Version 8.1 (20221121-115119)
### Breaking Changes
* Deprecates the `OpMode` fields `msStuckDetectInit`, `msStuckDetectInitLoop`, `msStuckDetectStart`, `msStuckDetectLoop`, and `msStuckDetectStop`.
* Op Modes no longer have a time limit for `init()`, `init_loop()`, `start()` or `loop()`, so the fields corresponding to those methods are no longer used.
* `stop()` still has a time limit, but it is now hardcoded to be 1 second, and cannot be changed using `msStuckDetectStop`.
* Deprecates the `OpMode` methods `internalPreInit()`, `internalPostInitLoop()`, and `internalPostLoop()`.
* Iterative `OpMode`s will continue to call these methods in case they were overridden.
* These methods will not be called at all for `LinearOpMode`s.
* Deprecates (and stops respecting) `DeviceProperties.xmlTagAliases`.
### Enhancements
* Adds a new `IMU` interface to Blocks and Java that can be used with both the original BNO055 IMU
included in all older Control Hubs and Expansion Hubs, and the new alternative BHI260AP IMU.
* You can determine which type of IMU is in your Control Hub by navigating to the Manage page of the web interface.
* To learn how to use the new `IMU` interface, see https://ftc-docs.firstinspires.org/programming_resources/imu/imu.html. The `SensorIMU` Blocks sample was also updated to use the new `IMU` interface, and the following Java samples were added:
* `SensorIMUOrthogonal`
* Use this sample if your REV Hub is mounted so that it is parallel or perpendicular to the
bottom of your robot.
* `SensorIMUNonOrthogonal`
* Use this sample if your REV Hub is mounted to your robot in any other orientation
* `ConceptExploringIMUOrientations`
* This Op Mode is a tool to help you understand how the orthogonal orientations work, and
which one applies to your robot.
* The BHI260AP IMU can only be accessed via the new `IMU` interface. The BNO055 IMU can be
programmed using the new `IMU` interface, or you can continue to program it using the old `BNO055IMU`
interface. If you want to be able to quickly switch to a new Control Hub that may contain the
BHI260AP IMU, you should migrate your code to use the new `IMU` interface.
* Unlike the old `BNO055IMU` interface, which only worked correctly when the REV Hub was mounted flat
on your robot, the `IMU` interface allows you to specify the orientation of the REV Hub on your
robot. It will account for this, and give you your orientation in a Robot Coordinate System,
instead of a special coordinate system for the REV Hub. As a result, your pitch and yaw will be
0 when your *robot* is level, instead of when the REV Hub is level, which will result in much
more reliable orientation angle values for most mounting orientations.
* Because of the new robot-centric coordinate system, the pitch and roll angles returned by the
`IMU` interface will be different from the ones returned by the `BNO055IMU` interface. When you are
migrating your code, pay careful attention to the documentation.
* If you have calibrated your BNO055, you can provide that calibration data to the new `IMU`
interface by passing a `BNO055IMUNew.Parameters` instance to `IMU.initialize()`.
* The `IMU` interface is also suitable for implementation by third-party vendors for IMUs that
support providing the orientation in the form of a quaternion.
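A minimal sketch of the new interface (an illustration assuming an IMU configured as "imu" on a flat, USB-forward hub; see the samples listed above for complete versions):
```java
IMU imu = hardwareMap.get(IMU.class, "imu");
imu.initialize(new IMU.Parameters(new RevHubOrientationOnRobot(
        RevHubOrientationOnRobot.LogoFacingDirection.UP,
        RevHubOrientationOnRobot.UsbFacingDirection.FORWARD)));
YawPitchRollAngles angles = imu.getRobotYawPitchRollAngles();
double yawDegrees = angles.getYaw(AngleUnit.DEGREES);
```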
* Iterative `OpMode`s (as opposed to `LinearOpMode`s) now run on a dedicated thread.
* Cycle times should not be as impacted by everything else going on in the system.
* Slow `OpMode`s can no longer increase the amount of time it takes to process network commands, and vice versa.
* The `init()`, `init_loop()`, `start()` and `loop()` methods no longer need to return within a certain time frame.
* BNO055 IMU legacy driver: restores the ability to initialize in one Op Mode, and then have another Op Mode re-use that
initialization. This allows you to maintain the 0-yaw position between Op Modes, if desired.
* Allows customized versions of device drivers in the FTC SDK to use the same XML tag.
* Before, if you wanted to customize a device driver, you had to copy it to a new class _and_ give
it a new XML tag. Giving it a new XML tag meant that to switch which driver was being used, you
had to modify your configuration file.
* Now, to use your custom driver, all you have to do is specify your custom driver's class when
calling `hardwareMap.get()`. To go back to the original driver, specify the original driver
class. If you specify an interface that is implemented by both the original driver and the
custom driver, there is no guarantee about which implementation will be returned.
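For instance (the custom class name here is hypothetical; `RevColorSensorV3` is the stock REV driver class):
```java
// Use the customized driver:
MyTweakedColorSensor custom = hardwareMap.get(MyTweakedColorSensor.class, "sensor");
// Switch back to the stock driver by naming its class instead:
RevColorSensorV3 stock = hardwareMap.get(RevColorSensorV3.class, "sensor");
```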
### Bug Fixes
* Fixes accessing the "Manage TensorFlow Lite Models" and "Manage Sounds" links and performing
Blocks and OnBotJava Op Mode downloads from the REV Hardware Client.
* Fixes issue where an I2C device driver would be auto-initialized using the parameters assigned in
a previous Op Mode run.
* Improves Driver Station popup menu placement in the landscape layout.
* Fixes NullPointerException when attempting to get a non-configured BNO055 IMU in a Blocks Op Mode on an RC phone.
* Fixes problem with Blocks if a variable is named `orientation`.
## Version 8.0 (20220907-131644)
### Breaking Changes
@ -88,8 +225,8 @@ The readme.md file located in the [/TeamCode/src/main/java/org/firstinspires/ftc
* The exception text in the popup window is both zoomable and scrollable just like a webpage.
* Pressing the "OK" button in the popup window will return to the main screen of the Driver Station and allow an Op Mode to be run again immediately, without the need to perform a "Restart Robot"
* Adds new Java sample to demonstrate using a hardware class to abstract robot actuators, and share them across multiple Op Modes.
* Sample Op Mode is [/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptExternalHardwareClass.java](ConceptExternalHardwareClass.java)
* Abstracted hardware class is [/FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/RobotHardware.java](RobotHardware.java))
* Sample Op Mode is [ConceptExternalHardwareClass.java](FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptExternalHardwareClass.java)
* Abstracted hardware class is [RobotHardware.java](FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/RobotHardware.java)
* Updates RobotAutoDriveByGyro_Linear Java sample to use REV Control/Expansion hub IMU.
* Updates Vuforia samples to reference PowerPlay assets and have correct names and field locations of image targets.
* Updates TensorFlow samples to reference PowerPlay assets.

View File

@ -17,9 +17,17 @@ apply from: '../build.dependencies.gradle'
android {
namespace = 'org.firstinspires.ftc.teamcode'
packagingOptions {
jniLibs.useLegacyPackaging true
}
}
dependencies {
implementation project(':FtcRobotController')
annotationProcessor files('lib/OpModeAnnotationProcessor.jar')
implementation 'org.apache.commons:commons-math3:3.6.1'
implementation 'com.fasterxml.jackson.core:jackson-databind:2.12.7'
implementation 'com.acmerobotics.roadrunner:core:0.5.6'
}

View File

@ -0,0 +1,94 @@
package org.firstinspires.ftc.teamcode.drive;
import com.acmerobotics.dashboard.config.Config;
import com.qualcomm.hardware.rev.RevHubOrientationOnRobot;
import com.qualcomm.robotcore.hardware.PIDFCoefficients;
/*
* Constants shared between multiple drive types.
*
* TODO: Tune or adjust the following constants to fit your robot. Note that the non-final
* fields may also be edited through the dashboard (connect to the robot's WiFi network and
* navigate to https://192.168.49.1:8080/dash). Make sure to save the values here after you
* adjust them in the dashboard; **config variable changes don't persist between app restarts**.
*
* These are not the only parameters; some are located in the localizer classes, drive base classes,
* and op modes themselves.
*/
@Config
public class DriveConstants {
/*
* These are motor constants that should be listed online for your motors.
*/
public static final double TICKS_PER_REV = 1;
public static final double MAX_RPM = 1;
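// e.g., a goBILDA 5203 312-RPM motor would use roughly TICKS_PER_REV = 537.7 and MAX_RPM = 312
// (an assumed example; always check your own motor's datasheet).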
/*
* Set RUN_USING_ENCODER to true to enable built-in hub velocity control using drive encoders.
* Set this flag to false if drive encoders are not present and an alternative localization
* method is in use (e.g., tracking wheels).
*
* If using the built-in motor velocity PID, update MOTOR_VELO_PID with the tuned coefficients
* from DriveVelocityPIDTuner.
*/
public static final boolean RUN_USING_ENCODER = false;
public static PIDFCoefficients MOTOR_VELO_PID = new PIDFCoefficients(0, 0, 0,
getMotorVelocityF(MAX_RPM / 60 * TICKS_PER_REV));
/*
* These are physical constants that can be determined from your robot (including the track
* width; it will be tuned empirically later, although a rough estimate is important). Users are
* free to choose whichever linear distance unit they would like so long as it is consistently
* used. The default values were selected with inches in mind. Road Runner uses radians for
* angular distances, although most angular parameters are wrapped in Math.toRadians() for
* convenience. Make sure to exclude any gear ratio included in MOTOR_CONFIG from GEAR_RATIO.
*/
public static double WHEEL_RADIUS = 2; // in
public static double GEAR_RATIO = 1; // output (wheel) speed / input (motor) speed
public static double TRACK_WIDTH = 1; // in
/*
* These are the feedforward parameters used to model the drive motor behavior. If you are using
* the built-in velocity PID, *these values are fine as is*. However, if you do not have drive
* motor encoders or have elected not to use them for velocity control, these values should be
* empirically tuned.
*/
public static double kV = 1.0 / rpmToVelocity(MAX_RPM);
public static double kA = 0;
public static double kStatic = 0;
/*
* These values are used to generate the trajectories for your robot. To ensure proper operation,
* the constraints should never exceed ~80% of the robot's actual capabilities. While Road
* Runner is designed to enable faster autonomous motion, it is a good idea to start with small
* values for testing and gradually increase them once everything is working. All distance units are
* inches.
*/
public static double MAX_VEL = 30;
public static double MAX_ACCEL = 30;
public static double MAX_ANG_VEL = Math.toRadians(60);
public static double MAX_ANG_ACCEL = Math.toRadians(60);
/*
* Adjust the orientations here to match your robot. See the FTC SDK documentation for details.
*/
public static RevHubOrientationOnRobot.LogoFacingDirection LOGO_FACING_DIR =
RevHubOrientationOnRobot.LogoFacingDirection.UP;
public static RevHubOrientationOnRobot.UsbFacingDirection USB_FACING_DIR =
RevHubOrientationOnRobot.UsbFacingDirection.FORWARD;
public static double encoderTicksToInches(double ticks) {
return WHEEL_RADIUS * 2 * Math.PI * GEAR_RATIO * ticks / TICKS_PER_REV;
}
public static double rpmToVelocity(double rpm) {
return rpm * GEAR_RATIO * 2 * Math.PI * WHEEL_RADIUS / 60.0;
}
public static double getMotorVelocityF(double ticksPerSecond) {
// see https://docs.google.com/document/d/1tyWrXDfMidwYyP_5H4mZyVgaEswhOC35gvdmP-V-5hA/edit#heading=h.61g9ixenznbx
return 32767 / ticksPerSecond;
}
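/* Worked example (assumed values, not this file's placeholders): with TICKS_PER_REV = 537.7,
 * WHEEL_RADIUS = 1.89 in, and GEAR_RATIO = 1, encoderTicksToInches(537.7) =
 * 1.89 * 2 * PI * 1 * 537.7 / 537.7 ≈ 11.87 in, i.e. one wheel revolution of travel.
 */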
}

View File

@ -0,0 +1,311 @@
package org.firstinspires.ftc.teamcode.drive;
import androidx.annotation.NonNull;
import com.acmerobotics.dashboard.config.Config;
import com.acmerobotics.roadrunner.control.PIDCoefficients;
import com.acmerobotics.roadrunner.drive.DriveSignal;
import com.acmerobotics.roadrunner.drive.MecanumDrive;
import com.acmerobotics.roadrunner.followers.HolonomicPIDVAFollower;
import com.acmerobotics.roadrunner.followers.TrajectoryFollower;
import com.acmerobotics.roadrunner.geometry.Pose2d;
import com.acmerobotics.roadrunner.trajectory.Trajectory;
import com.acmerobotics.roadrunner.trajectory.TrajectoryBuilder;
import com.acmerobotics.roadrunner.trajectory.constraints.AngularVelocityConstraint;
import com.acmerobotics.roadrunner.trajectory.constraints.MecanumVelocityConstraint;
import com.acmerobotics.roadrunner.trajectory.constraints.MinVelocityConstraint;
import com.acmerobotics.roadrunner.trajectory.constraints.ProfileAccelerationConstraint;
import com.acmerobotics.roadrunner.trajectory.constraints.TrajectoryAccelerationConstraint;
import com.acmerobotics.roadrunner.trajectory.constraints.TrajectoryVelocityConstraint;
import com.qualcomm.hardware.lynx.LynxModule;
import com.qualcomm.hardware.rev.RevHubOrientationOnRobot;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.hardware.DcMotorEx;
import com.qualcomm.robotcore.hardware.HardwareMap;
import com.qualcomm.robotcore.hardware.IMU;
import com.qualcomm.robotcore.hardware.PIDFCoefficients;
import com.qualcomm.robotcore.hardware.VoltageSensor;
import com.qualcomm.robotcore.hardware.configuration.typecontainers.MotorConfigurationType;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.teamcode.trajectorysequence.TrajectorySequence;
import org.firstinspires.ftc.teamcode.trajectorysequence.TrajectorySequenceBuilder;
import org.firstinspires.ftc.teamcode.trajectorysequence.TrajectorySequenceRunner;
import org.firstinspires.ftc.teamcode.util.LynxModuleUtil;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static org.firstinspires.ftc.teamcode.drive.DriveConstants.MAX_ACCEL;
import static org.firstinspires.ftc.teamcode.drive.DriveConstants.MAX_ANG_ACCEL;
import static org.firstinspires.ftc.teamcode.drive.DriveConstants.MAX_ANG_VEL;
import static org.firstinspires.ftc.teamcode.drive.DriveConstants.MAX_VEL;
import static org.firstinspires.ftc.teamcode.drive.DriveConstants.MOTOR_VELO_PID;
import static org.firstinspires.ftc.teamcode.drive.DriveConstants.RUN_USING_ENCODER;
import static org.firstinspires.ftc.teamcode.drive.DriveConstants.TRACK_WIDTH;
import static org.firstinspires.ftc.teamcode.drive.DriveConstants.encoderTicksToInches;
import static org.firstinspires.ftc.teamcode.drive.DriveConstants.kA;
import static org.firstinspires.ftc.teamcode.drive.DriveConstants.kStatic;
import static org.firstinspires.ftc.teamcode.drive.DriveConstants.kV;
/*
* Simple mecanum drive hardware implementation for REV hardware.
*/
@Config
public class SampleMecanumDrive extends MecanumDrive {
public static PIDCoefficients TRANSLATIONAL_PID = new PIDCoefficients(0, 0, 0);
public static PIDCoefficients HEADING_PID = new PIDCoefficients(0, 0, 0);
public static double LATERAL_MULTIPLIER = 1;
public static double VX_WEIGHT = 1;
public static double VY_WEIGHT = 1;
public static double OMEGA_WEIGHT = 1;
private TrajectorySequenceRunner trajectorySequenceRunner;
private static final TrajectoryVelocityConstraint VEL_CONSTRAINT = getVelocityConstraint(MAX_VEL, MAX_ANG_VEL, TRACK_WIDTH);
private static final TrajectoryAccelerationConstraint ACCEL_CONSTRAINT = getAccelerationConstraint(MAX_ACCEL);
private TrajectoryFollower follower;
private DcMotorEx leftFront, leftRear, rightRear, rightFront;
private List<DcMotorEx> motors;
private IMU imu;
private VoltageSensor batteryVoltageSensor;
private List<Integer> lastEncPositions = new ArrayList<>();
private List<Integer> lastEncVels = new ArrayList<>();
public SampleMecanumDrive(HardwareMap hardwareMap) {
super(kV, kA, kStatic, TRACK_WIDTH, TRACK_WIDTH, LATERAL_MULTIPLIER);
follower = new HolonomicPIDVAFollower(TRANSLATIONAL_PID, TRANSLATIONAL_PID, HEADING_PID,
new Pose2d(0.5, 0.5, Math.toRadians(5.0)), 0.5);
LynxModuleUtil.ensureMinimumFirmwareVersion(hardwareMap);
batteryVoltageSensor = hardwareMap.voltageSensor.iterator().next();
for (LynxModule module : hardwareMap.getAll(LynxModule.class)) {
module.setBulkCachingMode(LynxModule.BulkCachingMode.AUTO);
}
// TODO: adjust the names of the following hardware devices to match your configuration
imu = hardwareMap.get(IMU.class, "imu");
IMU.Parameters parameters = new IMU.Parameters(new RevHubOrientationOnRobot(
DriveConstants.LOGO_FACING_DIR, DriveConstants.USB_FACING_DIR));
imu.initialize(parameters);
leftFront = hardwareMap.get(DcMotorEx.class, "leftFront");
leftRear = hardwareMap.get(DcMotorEx.class, "leftRear");
rightRear = hardwareMap.get(DcMotorEx.class, "rightRear");
rightFront = hardwareMap.get(DcMotorEx.class, "rightFront");
motors = Arrays.asList(leftFront, leftRear, rightRear, rightFront);
for (DcMotorEx motor : motors) {
MotorConfigurationType motorConfigurationType = motor.getMotorType().clone();
motorConfigurationType.setAchieveableMaxRPMFraction(1.0);
motor.setMotorType(motorConfigurationType);
}
if (RUN_USING_ENCODER) {
setMode(DcMotor.RunMode.RUN_USING_ENCODER);
}
setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);
if (RUN_USING_ENCODER && MOTOR_VELO_PID != null) {
setPIDFCoefficients(DcMotor.RunMode.RUN_USING_ENCODER, MOTOR_VELO_PID);
}
// TODO: reverse any motors using DcMotor.setDirection()
List<Integer> lastTrackingEncPositions = new ArrayList<>();
List<Integer> lastTrackingEncVels = new ArrayList<>();
// TODO: if desired, use setLocalizer() to change the localization method
// setLocalizer(new StandardTrackingWheelLocalizer(hardwareMap, lastTrackingEncPositions, lastTrackingEncVels));
trajectorySequenceRunner = new TrajectorySequenceRunner(
follower, HEADING_PID, batteryVoltageSensor,
lastEncPositions, lastEncVels, lastTrackingEncPositions, lastTrackingEncVels
);
}
public TrajectoryBuilder trajectoryBuilder(Pose2d startPose) {
return new TrajectoryBuilder(startPose, VEL_CONSTRAINT, ACCEL_CONSTRAINT);
}
public TrajectoryBuilder trajectoryBuilder(Pose2d startPose, boolean reversed) {
return new TrajectoryBuilder(startPose, reversed, VEL_CONSTRAINT, ACCEL_CONSTRAINT);
}
public TrajectoryBuilder trajectoryBuilder(Pose2d startPose, double startHeading) {
return new TrajectoryBuilder(startPose, startHeading, VEL_CONSTRAINT, ACCEL_CONSTRAINT);
}
public TrajectorySequenceBuilder trajectorySequenceBuilder(Pose2d startPose) {
return new TrajectorySequenceBuilder(
startPose,
VEL_CONSTRAINT, ACCEL_CONSTRAINT,
MAX_ANG_VEL, MAX_ANG_ACCEL
);
}
public void turnAsync(double angle) {
trajectorySequenceRunner.followTrajectorySequenceAsync(
trajectorySequenceBuilder(getPoseEstimate())
.turn(angle)
.build()
);
}
public void turn(double angle) {
turnAsync(angle);
waitForIdle();
}
public void followTrajectoryAsync(Trajectory trajectory) {
trajectorySequenceRunner.followTrajectorySequenceAsync(
trajectorySequenceBuilder(trajectory.start())
.addTrajectory(trajectory)
.build()
);
}
public void followTrajectory(Trajectory trajectory) {
followTrajectoryAsync(trajectory);
waitForIdle();
}
public void followTrajectorySequenceAsync(TrajectorySequence trajectorySequence) {
trajectorySequenceRunner.followTrajectorySequenceAsync(trajectorySequence);
}
public void followTrajectorySequence(TrajectorySequence trajectorySequence) {
followTrajectorySequenceAsync(trajectorySequence);
waitForIdle();
}
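/* Usage sketch (an illustration of typical op mode code, not part of this class):
 *   SampleMecanumDrive drive = new SampleMecanumDrive(hardwareMap);
 *   Trajectory traj = drive.trajectoryBuilder(new Pose2d())
 *           .forward(24)
 *           .build();
 *   drive.followTrajectory(traj); // blocks until the trajectory finishes
 */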
public Pose2d getLastError() {
return trajectorySequenceRunner.getLastPoseError();
}
public void update() {
updatePoseEstimate();
DriveSignal signal = trajectorySequenceRunner.update(getPoseEstimate(), getPoseVelocity());
if (signal != null) setDriveSignal(signal);
}
public void waitForIdle() {
while (!Thread.currentThread().isInterrupted() && isBusy())
update();
}
public boolean isBusy() {
return trajectorySequenceRunner.isBusy();
}
public void setMode(DcMotor.RunMode runMode) {
for (DcMotorEx motor : motors) {
motor.setMode(runMode);
}
}
public void setZeroPowerBehavior(DcMotor.ZeroPowerBehavior zeroPowerBehavior) {
for (DcMotorEx motor : motors) {
motor.setZeroPowerBehavior(zeroPowerBehavior);
}
}
public void setPIDFCoefficients(DcMotor.RunMode runMode, PIDFCoefficients coefficients) {
PIDFCoefficients compensatedCoefficients = new PIDFCoefficients(
coefficients.p, coefficients.i, coefficients.d,
coefficients.f * 12 / batteryVoltageSensor.getVoltage()
);
for (DcMotorEx motor : motors) {
motor.setPIDFCoefficients(runMode, compensatedCoefficients);
}
}
public void setWeightedDrivePower(Pose2d drivePower) {
Pose2d vel = drivePower;
if (Math.abs(drivePower.getX()) + Math.abs(drivePower.getY())
+ Math.abs(drivePower.getHeading()) > 1) {
// re-normalize the powers according to the weights
double denom = VX_WEIGHT * Math.abs(drivePower.getX())
+ VY_WEIGHT * Math.abs(drivePower.getY())
+ OMEGA_WEIGHT * Math.abs(drivePower.getHeading());
vel = new Pose2d(
VX_WEIGHT * drivePower.getX(),
VY_WEIGHT * drivePower.getY(),
OMEGA_WEIGHT * drivePower.getHeading()
).div(denom);
}
setDrivePower(vel);
}
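/* Usage sketch (an illustration; a TeleOp loop might call this every cycle):
 *   drive.setWeightedDrivePower(new Pose2d(
 *           -gamepad1.left_stick_y,  // forward/back
 *           -gamepad1.left_stick_x,  // strafe
 *           -gamepad1.right_stick_x  // turn
 *   ));
 * The VX/VY/OMEGA weights above control how the axes trade off when total power exceeds 1.
 */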
@NonNull
@Override
public List<Double> getWheelPositions() {
lastEncPositions.clear();
List<Double> wheelPositions = new ArrayList<>();
for (DcMotorEx motor : motors) {
int position = motor.getCurrentPosition();
lastEncPositions.add(position);
wheelPositions.add(encoderTicksToInches(position));
}
return wheelPositions;
}
@Override
public List<Double> getWheelVelocities() {
lastEncVels.clear();
List<Double> wheelVelocities = new ArrayList<>();
for (DcMotorEx motor : motors) {
int vel = (int) motor.getVelocity();
lastEncVels.add(vel);
wheelVelocities.add(encoderTicksToInches(vel));
}
return wheelVelocities;
}
@Override
public void setMotorPowers(double frontLeftPower, double rearLeftPower, double rearRightPower, double frontRightPower) {
leftFront.setPower(frontLeftPower);
leftRear.setPower(rearLeftPower);
rightRear.setPower(rearRightPower);
rightFront.setPower(frontRightPower);
}
@Override
public double getRawExternalHeading() {
return imu.getRobotYawPitchRollAngles().getYaw(AngleUnit.RADIANS);
}
@Override
public Double getExternalHeadingVelocity() {
return (double) imu.getRobotAngularVelocity(AngleUnit.RADIANS).zRotationRate;
}
public static TrajectoryVelocityConstraint getVelocityConstraint(double maxVel, double maxAngularVel, double trackWidth) {
return new MinVelocityConstraint(Arrays.asList(
new AngularVelocityConstraint(maxAngularVel),
new MecanumVelocityConstraint(maxVel, trackWidth)
));
}
public static TrajectoryAccelerationConstraint getAccelerationConstraint(double maxAccel) {
return new ProfileAccelerationConstraint(maxAccel);
}
}

View File

@ -0,0 +1,305 @@
package org.firstinspires.ftc.teamcode.drive;
import androidx.annotation.NonNull;
import com.acmerobotics.dashboard.config.Config;
import com.acmerobotics.roadrunner.control.PIDCoefficients;
import com.acmerobotics.roadrunner.drive.DriveSignal;
import com.acmerobotics.roadrunner.drive.TankDrive;
import com.acmerobotics.roadrunner.followers.TankPIDVAFollower;
import com.acmerobotics.roadrunner.followers.TrajectoryFollower;
import com.acmerobotics.roadrunner.geometry.Pose2d;
import com.acmerobotics.roadrunner.trajectory.Trajectory;
import com.acmerobotics.roadrunner.trajectory.TrajectoryBuilder;
import com.acmerobotics.roadrunner.trajectory.constraints.AngularVelocityConstraint;
import com.acmerobotics.roadrunner.trajectory.constraints.MinVelocityConstraint;
import com.acmerobotics.roadrunner.trajectory.constraints.ProfileAccelerationConstraint;
import com.acmerobotics.roadrunner.trajectory.constraints.TankVelocityConstraint;
import com.acmerobotics.roadrunner.trajectory.constraints.TrajectoryAccelerationConstraint;
import com.acmerobotics.roadrunner.trajectory.constraints.TrajectoryVelocityConstraint;
import com.qualcomm.hardware.lynx.LynxModule;
import com.qualcomm.hardware.rev.RevHubOrientationOnRobot;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.hardware.DcMotorEx;
import com.qualcomm.robotcore.hardware.HardwareMap;
import com.qualcomm.robotcore.hardware.IMU;
import com.qualcomm.robotcore.hardware.PIDFCoefficients;
import com.qualcomm.robotcore.hardware.VoltageSensor;
import com.qualcomm.robotcore.hardware.configuration.typecontainers.MotorConfigurationType;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.teamcode.trajectorysequence.TrajectorySequence;
import org.firstinspires.ftc.teamcode.trajectorysequence.TrajectorySequenceBuilder;
import org.firstinspires.ftc.teamcode.trajectorysequence.TrajectorySequenceRunner;
import org.firstinspires.ftc.teamcode.util.LynxModuleUtil;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static org.firstinspires.ftc.teamcode.drive.DriveConstants.MAX_ACCEL;
import static org.firstinspires.ftc.teamcode.drive.DriveConstants.MAX_ANG_ACCEL;
import static org.firstinspires.ftc.teamcode.drive.DriveConstants.MAX_ANG_VEL;
import static org.firstinspires.ftc.teamcode.drive.DriveConstants.MAX_VEL;
import static org.firstinspires.ftc.teamcode.drive.DriveConstants.MOTOR_VELO_PID;
import static org.firstinspires.ftc.teamcode.drive.DriveConstants.RUN_USING_ENCODER;
import static org.firstinspires.ftc.teamcode.drive.DriveConstants.TRACK_WIDTH;
import static org.firstinspires.ftc.teamcode.drive.DriveConstants.encoderTicksToInches;
import static org.firstinspires.ftc.teamcode.drive.DriveConstants.kA;
import static org.firstinspires.ftc.teamcode.drive.DriveConstants.kStatic;
import static org.firstinspires.ftc.teamcode.drive.DriveConstants.kV;
/*
* Simple tank drive hardware implementation for REV hardware.
*/
@Config
public class SampleTankDrive extends TankDrive {
public static PIDCoefficients AXIAL_PID = new PIDCoefficients(0, 0, 0);
public static PIDCoefficients CROSS_TRACK_PID = new PIDCoefficients(0, 0, 0);
public static PIDCoefficients HEADING_PID = new PIDCoefficients(0, 0, 0);
public static double VX_WEIGHT = 1;
public static double OMEGA_WEIGHT = 1;
private TrajectorySequenceRunner trajectorySequenceRunner;
private static final TrajectoryVelocityConstraint VEL_CONSTRAINT = getVelocityConstraint(MAX_VEL, MAX_ANG_VEL, TRACK_WIDTH);
private static final TrajectoryAccelerationConstraint ACCEL_CONSTRAINT = getAccelerationConstraint(MAX_ACCEL);
private TrajectoryFollower follower;
private List<DcMotorEx> motors, leftMotors, rightMotors;
private IMU imu;
private VoltageSensor batteryVoltageSensor;
public SampleTankDrive(HardwareMap hardwareMap) {
super(kV, kA, kStatic, TRACK_WIDTH);
follower = new TankPIDVAFollower(AXIAL_PID, CROSS_TRACK_PID,
new Pose2d(0.5, 0.5, Math.toRadians(5.0)), 0.5);
LynxModuleUtil.ensureMinimumFirmwareVersion(hardwareMap);
batteryVoltageSensor = hardwareMap.voltageSensor.iterator().next();
for (LynxModule module : hardwareMap.getAll(LynxModule.class)) {
module.setBulkCachingMode(LynxModule.BulkCachingMode.AUTO);
}
// TODO: adjust the names of the following hardware devices to match your configuration
imu = hardwareMap.get(IMU.class, "imu");
IMU.Parameters parameters = new IMU.Parameters(new RevHubOrientationOnRobot(
DriveConstants.LOGO_FACING_DIR, DriveConstants.USB_FACING_DIR));
imu.initialize(parameters);
// add/remove motors depending on your robot (e.g., 6WD)
DcMotorEx leftFront = hardwareMap.get(DcMotorEx.class, "leftFront");
DcMotorEx leftRear = hardwareMap.get(DcMotorEx.class, "leftRear");
DcMotorEx rightRear = hardwareMap.get(DcMotorEx.class, "rightRear");
DcMotorEx rightFront = hardwareMap.get(DcMotorEx.class, "rightFront");
motors = Arrays.asList(leftFront, leftRear, rightRear, rightFront);
leftMotors = Arrays.asList(leftFront, leftRear);
rightMotors = Arrays.asList(rightFront, rightRear);
for (DcMotorEx motor : motors) {
MotorConfigurationType motorConfigurationType = motor.getMotorType().clone();
motorConfigurationType.setAchieveableMaxRPMFraction(1.0);
motor.setMotorType(motorConfigurationType);
}
if (RUN_USING_ENCODER) {
setMode(DcMotor.RunMode.RUN_USING_ENCODER);
}
setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);
if (RUN_USING_ENCODER && MOTOR_VELO_PID != null) {
setPIDFCoefficients(DcMotor.RunMode.RUN_USING_ENCODER, MOTOR_VELO_PID);
}
// TODO: reverse any motors using DcMotor.setDirection()
// TODO: if desired, use setLocalizer() to change the localization method
// for instance, setLocalizer(new ThreeTrackingWheelLocalizer(...));
trajectorySequenceRunner = new TrajectorySequenceRunner(
follower, HEADING_PID, batteryVoltageSensor,
new ArrayList<>(), new ArrayList<>(), new ArrayList<>(), new ArrayList<>()
);
}
public TrajectoryBuilder trajectoryBuilder(Pose2d startPose) {
return new TrajectoryBuilder(startPose, VEL_CONSTRAINT, ACCEL_CONSTRAINT);
}
public TrajectoryBuilder trajectoryBuilder(Pose2d startPose, boolean reversed) {
return new TrajectoryBuilder(startPose, reversed, VEL_CONSTRAINT, ACCEL_CONSTRAINT);
}
public TrajectoryBuilder trajectoryBuilder(Pose2d startPose, double startHeading) {
return new TrajectoryBuilder(startPose, startHeading, VEL_CONSTRAINT, ACCEL_CONSTRAINT);
}
public TrajectorySequenceBuilder trajectorySequenceBuilder(Pose2d startPose) {
return new TrajectorySequenceBuilder(
startPose,
VEL_CONSTRAINT, ACCEL_CONSTRAINT,
MAX_ANG_VEL, MAX_ANG_ACCEL
);
}
public void turnAsync(double angle) {
trajectorySequenceRunner.followTrajectorySequenceAsync(
trajectorySequenceBuilder(getPoseEstimate())
.turn(angle)
.build()
);
}
public void turn(double angle) {
turnAsync(angle);
waitForIdle();
}
public void followTrajectoryAsync(Trajectory trajectory) {
trajectorySequenceRunner.followTrajectorySequenceAsync(
trajectorySequenceBuilder(trajectory.start())
.addTrajectory(trajectory)
.build()
);
}
public void followTrajectory(Trajectory trajectory) {
followTrajectoryAsync(trajectory);
waitForIdle();
}
public void followTrajectorySequenceAsync(TrajectorySequence trajectorySequence) {
trajectorySequenceRunner.followTrajectorySequenceAsync(trajectorySequence);
}
public void followTrajectorySequence(TrajectorySequence trajectorySequence) {
followTrajectorySequenceAsync(trajectorySequence);
waitForIdle();
}
public Pose2d getLastError() {
return trajectorySequenceRunner.getLastPoseError();
}
public void update() {
updatePoseEstimate();
DriveSignal signal = trajectorySequenceRunner.update(getPoseEstimate(), getPoseVelocity());
if (signal != null) setDriveSignal(signal);
}
public void waitForIdle() {
while (!Thread.currentThread().isInterrupted() && isBusy())
update();
}
public boolean isBusy() {
return trajectorySequenceRunner.isBusy();
}
public void setMode(DcMotor.RunMode runMode) {
for (DcMotorEx motor : motors) {
motor.setMode(runMode);
}
}
public void setZeroPowerBehavior(DcMotor.ZeroPowerBehavior zeroPowerBehavior) {
for (DcMotorEx motor : motors) {
motor.setZeroPowerBehavior(zeroPowerBehavior);
}
}
public void setPIDFCoefficients(DcMotor.RunMode runMode, PIDFCoefficients coefficients) {
PIDFCoefficients compensatedCoefficients = new PIDFCoefficients(
coefficients.p, coefficients.i, coefficients.d,
coefficients.f * 12 / batteryVoltageSensor.getVoltage()
);
for (DcMotorEx motor : motors) {
motor.setPIDFCoefficients(runMode, compensatedCoefficients);
}
}
public void setWeightedDrivePower(Pose2d drivePower) {
Pose2d vel;
if (Math.abs(drivePower.getX()) + Math.abs(drivePower.getHeading()) > 1) {
// re-normalize the powers according to the weights
double denom = VX_WEIGHT * Math.abs(drivePower.getX())
+ OMEGA_WEIGHT * Math.abs(drivePower.getHeading());
vel = new Pose2d(
VX_WEIGHT * drivePower.getX(),
0,
OMEGA_WEIGHT * drivePower.getHeading()
).div(denom);
} else {
// Ensure the y axis is zeroed out.
vel = new Pose2d(drivePower.getX(), 0, drivePower.getHeading());
}
setDrivePower(vel);
}
@NonNull
@Override
public List<Double> getWheelPositions() {
double leftSum = 0, rightSum = 0;
for (DcMotorEx leftMotor : leftMotors) {
leftSum += encoderTicksToInches(leftMotor.getCurrentPosition());
}
for (DcMotorEx rightMotor : rightMotors) {
rightSum += encoderTicksToInches(rightMotor.getCurrentPosition());
}
return Arrays.asList(leftSum / leftMotors.size(), rightSum / rightMotors.size());
}
public List<Double> getWheelVelocities() {
double leftSum = 0, rightSum = 0;
for (DcMotorEx leftMotor : leftMotors) {
leftSum += encoderTicksToInches(leftMotor.getVelocity());
}
for (DcMotorEx rightMotor : rightMotors) {
rightSum += encoderTicksToInches(rightMotor.getVelocity());
}
return Arrays.asList(leftSum / leftMotors.size(), rightSum / rightMotors.size());
}
@Override
public void setMotorPowers(double v, double v1) {
for (DcMotorEx leftMotor : leftMotors) {
leftMotor.setPower(v);
}
for (DcMotorEx rightMotor : rightMotors) {
rightMotor.setPower(v1);
}
}
@Override
public double getRawExternalHeading() {
return imu.getRobotYawPitchRollAngles().getYaw(AngleUnit.RADIANS);
}
@Override
public Double getExternalHeadingVelocity() {
return (double) imu.getRobotAngularVelocity(AngleUnit.RADIANS).zRotationRate;
}
public static TrajectoryVelocityConstraint getVelocityConstraint(double maxVel, double maxAngularVel, double trackWidth) {
return new MinVelocityConstraint(Arrays.asList(
new AngularVelocityConstraint(maxAngularVel),
new TankVelocityConstraint(maxVel, trackWidth)
));
}
public static TrajectoryAccelerationConstraint getAccelerationConstraint(double maxAccel) {
return new ProfileAccelerationConstraint(maxAccel);
}
}
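For context on how the class above is consumed, here is a minimal usage sketch: a hypothetical autonomous op mode (not part of this change set) that builds and follows a trajectory sequence with SampleTankDrive. The class name, start pose, and distances are illustrative assumptions.

package org.firstinspires.ftc.teamcode.drive.opmode;

import com.acmerobotics.roadrunner.geometry.Pose2d;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import org.firstinspires.ftc.teamcode.drive.SampleTankDrive;
import org.firstinspires.ftc.teamcode.trajectorysequence.TrajectorySequence;

@Autonomous(group = "drive")
public class TankDriveSequenceSketch extends LinearOpMode {
    @Override
    public void runOpMode() throws InterruptedException {
        SampleTankDrive drive = new SampleTankDrive(hardwareMap);
        Pose2d startPose = new Pose2d(0, 0, 0); // illustrative start pose
        drive.setPoseEstimate(startPose);
        // drive a 24 in leg, point-turn 90 deg counterclockwise, drive another leg
        TrajectorySequence seq = drive.trajectorySequenceBuilder(startPose)
                .forward(24)
                .turn(Math.toRadians(90))
                .forward(24)
                .build();
        waitForStart();
        if (isStopRequested()) return;
        drive.followTrajectorySequence(seq);
    }
}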


@ -0,0 +1,99 @@
package org.firstinspires.ftc.teamcode.drive;
import androidx.annotation.NonNull;
import com.acmerobotics.dashboard.config.Config;
import com.acmerobotics.roadrunner.geometry.Pose2d;
import com.acmerobotics.roadrunner.localization.ThreeTrackingWheelLocalizer;
import com.qualcomm.robotcore.hardware.DcMotorEx;
import com.qualcomm.robotcore.hardware.HardwareMap;
import org.firstinspires.ftc.teamcode.util.Encoder;
import java.util.Arrays;
import java.util.List;
/*
* Sample tracking wheel localizer implementation assuming the standard configuration:
*
* /--------------\
* | ____ |
* | ---- |
* | || || |
* | || || |
* | |
* | |
* \--------------/
*
*/
@Config
public class StandardTrackingWheelLocalizer extends ThreeTrackingWheelLocalizer {
public static double TICKS_PER_REV = 0; // TODO: set to your encoder's ticks per revolution (used as a divisor below)
public static double WHEEL_RADIUS = 2; // in
public static double GEAR_RATIO = 1; // output (wheel) speed / input (encoder) speed
public static double LATERAL_DISTANCE = 10; // in; distance between the left and right wheels
public static double FORWARD_OFFSET = 4; // in; offset of the lateral wheel
private Encoder leftEncoder, rightEncoder, frontEncoder;
private List<Integer> lastEncPositions, lastEncVels;
public StandardTrackingWheelLocalizer(HardwareMap hardwareMap, List<Integer> lastTrackingEncPositions, List<Integer> lastTrackingEncVels) {
super(Arrays.asList(
new Pose2d(0, LATERAL_DISTANCE / 2, 0), // left
new Pose2d(0, -LATERAL_DISTANCE / 2, 0), // right
new Pose2d(FORWARD_OFFSET, 0, Math.toRadians(90)) // front
));
lastEncPositions = lastTrackingEncPositions;
lastEncVels = lastTrackingEncVels;
leftEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, "leftEncoder"));
rightEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, "rightEncoder"));
frontEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, "frontEncoder"));
// TODO: reverse any encoders using Encoder.setDirection(Encoder.Direction.REVERSE)
}
public static double encoderTicksToInches(double ticks) {
return WHEEL_RADIUS * 2 * Math.PI * GEAR_RATIO * ticks / TICKS_PER_REV;
}
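// Worked example (illustrative values, not defaults): with TICKS_PER_REV = 8192 (a common
// through-bore encoder resolution), WHEEL_RADIUS = 2 in, and GEAR_RATIO = 1, one full
// revolution of 8192 ticks maps to 2 * Math.PI * 2 * 1 * 8192 / 8192 = ~12.57 in of travel.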
@NonNull
@Override
public List<Double> getWheelPositions() {
int leftPos = leftEncoder.getCurrentPosition();
int rightPos = rightEncoder.getCurrentPosition();
int frontPos = frontEncoder.getCurrentPosition();
lastEncPositions.clear();
lastEncPositions.add(leftPos);
lastEncPositions.add(rightPos);
lastEncPositions.add(frontPos);
return Arrays.asList(
encoderTicksToInches(leftPos),
encoderTicksToInches(rightPos),
encoderTicksToInches(frontPos)
);
}
@NonNull
@Override
public List<Double> getWheelVelocities() {
int leftVel = (int) leftEncoder.getCorrectedVelocity();
int rightVel = (int) rightEncoder.getCorrectedVelocity();
int frontVel = (int) frontEncoder.getCorrectedVelocity();
lastEncVels.clear();
lastEncVels.add(leftVel);
lastEncVels.add(rightVel);
lastEncVels.add(frontVel);
return Arrays.asList(
encoderTicksToInches(leftVel),
encoderTicksToInches(rightVel),
encoderTicksToInches(frontVel)
);
}
}
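Several of the tuning op modes below assume this localizer has been installed on the drive class via setLocalizer(). A minimal wiring sketch follows; exactly where it goes inside the drive constructor, and the list names, are assumptions:

// in the drive class constructor, after the motors and IMU are configured:
List<Integer> lastTrackingEncPositions = new ArrayList<>();
List<Integer> lastTrackingEncVels = new ArrayList<>();
setLocalizer(new StandardTrackingWheelLocalizer(
        hardwareMap, lastTrackingEncPositions, lastTrackingEncVels));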


@ -0,0 +1,221 @@
package org.firstinspires.ftc.teamcode.drive.opmode;
import static org.firstinspires.ftc.teamcode.drive.DriveConstants.MAX_RPM;
import static org.firstinspires.ftc.teamcode.drive.DriveConstants.RUN_USING_ENCODER;
import static org.firstinspires.ftc.teamcode.drive.DriveConstants.rpmToVelocity;
import com.acmerobotics.dashboard.FtcDashboard;
import com.acmerobotics.dashboard.config.Config;
import com.acmerobotics.dashboard.telemetry.MultipleTelemetry;
import com.acmerobotics.roadrunner.geometry.Pose2d;
import com.acmerobotics.roadrunner.util.NanoClock;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.util.RobotLog;
import org.firstinspires.ftc.robotcore.external.Telemetry;
import org.firstinspires.ftc.robotcore.internal.system.Misc;
import org.firstinspires.ftc.teamcode.drive.SampleMecanumDrive;
import org.firstinspires.ftc.teamcode.util.LoggingUtil;
import org.firstinspires.ftc.teamcode.util.RegressionUtil;
import java.util.ArrayList;
import java.util.List;
/*
* Op mode for computing kV, kStatic, and kA from various drive routines. For the curious, here's an
* outline of the procedure:
* 1. Slowly ramp the motor power and record encoder values along the way.
* 2. Run a linear regression on the encoder velocity vs. motor power plot to obtain a slope (kV)
* and an optional intercept (kStatic).
* 3. Accelerate the robot (apply constant power) and record the encoder counts.
* 4. Adjust the encoder data based on the velocity tuning data and find kA with another linear
* regression.
*/
@Config
@Autonomous(group = "drive")
public class AutomaticFeedforwardTuner extends LinearOpMode {
public static double MAX_POWER = 0.7;
public static double DISTANCE = 100; // in
@Override
public void runOpMode() throws InterruptedException {
if (RUN_USING_ENCODER) {
RobotLog.setGlobalErrorMsg("Feedforward constants usually don't need to be tuned " +
"when using the built-in drive motor velocity PID.");
}
Telemetry telemetry = new MultipleTelemetry(this.telemetry, FtcDashboard.getInstance().getTelemetry());
SampleMecanumDrive drive = new SampleMecanumDrive(hardwareMap);
NanoClock clock = NanoClock.system();
telemetry.addLine("Press play to begin the feedforward tuning routine");
telemetry.update();
waitForStart();
if (isStopRequested()) return;
telemetry.clearAll();
telemetry.addLine("Would you like to fit kStatic?");
telemetry.addLine("Press (Y/Δ) for yes, (B/O) for no");
telemetry.update();
boolean fitIntercept = false;
while (!isStopRequested()) {
if (gamepad1.y) {
fitIntercept = true;
while (!isStopRequested() && gamepad1.y) {
idle();
}
break;
} else if (gamepad1.b) {
while (!isStopRequested() && gamepad1.b) {
idle();
}
break;
}
idle();
}
telemetry.clearAll();
telemetry.addLine(Misc.formatInvariant(
"Place your robot on the field with at least %.2f in of room in front", DISTANCE));
telemetry.addLine("Press (Y/Δ) to begin");
telemetry.update();
while (!isStopRequested() && !gamepad1.y) {
idle();
}
while (!isStopRequested() && gamepad1.y) {
idle();
}
telemetry.clearAll();
telemetry.addLine("Running...");
telemetry.update();
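// Derivation for the ramp parameters below: with constant acceleration a over distance d,
// v^2 = 2*a*d gives accel = finalVel^2 / (2 * DISTANCE), and d = (1/2)*a*t^2 gives
// rampTime = sqrt(2 * DISTANCE / accel), so the power ramp tops out right as DISTANCE is covered.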
double maxVel = rpmToVelocity(MAX_RPM);
double finalVel = MAX_POWER * maxVel;
double accel = (finalVel * finalVel) / (2.0 * DISTANCE);
double rampTime = Math.sqrt(2.0 * DISTANCE / accel);
List<Double> timeSamples = new ArrayList<>();
List<Double> positionSamples = new ArrayList<>();
List<Double> powerSamples = new ArrayList<>();
drive.setPoseEstimate(new Pose2d());
double startTime = clock.seconds();
while (!isStopRequested()) {
double elapsedTime = clock.seconds() - startTime;
if (elapsedTime > rampTime) {
break;
}
double vel = accel * elapsedTime;
double power = vel / maxVel;
timeSamples.add(elapsedTime);
positionSamples.add(drive.getPoseEstimate().getX());
powerSamples.add(power);
drive.setDrivePower(new Pose2d(power, 0.0, 0.0));
drive.updatePoseEstimate();
}
drive.setDrivePower(new Pose2d(0.0, 0.0, 0.0));
RegressionUtil.RampResult rampResult = RegressionUtil.fitRampData(
timeSamples, positionSamples, powerSamples, fitIntercept,
LoggingUtil.getLogFile(Misc.formatInvariant(
"DriveRampRegression-%d.csv", System.currentTimeMillis())));
telemetry.clearAll();
telemetry.addLine("Quasi-static ramp up test complete");
if (fitIntercept) {
telemetry.addLine(Misc.formatInvariant("kV = %.5f, kStatic = %.5f (R^2 = %.2f)",
rampResult.kV, rampResult.kStatic, rampResult.rSquare));
} else {
telemetry.addLine(Misc.formatInvariant("kV = %.5f (R^2 = %.2f)",
rampResult.kV, rampResult.rSquare));
}
telemetry.addLine("Would you like to fit kA?");
telemetry.addLine("Press (Y/Δ) for yes, (B/O) for no");
telemetry.update();
boolean fitAccelFF = false;
while (!isStopRequested()) {
if (gamepad1.y) {
fitAccelFF = true;
while (!isStopRequested() && gamepad1.y) {
idle();
}
break;
} else if (gamepad1.b) {
while (!isStopRequested() && gamepad1.b) {
idle();
}
break;
}
idle();
}
if (fitAccelFF) {
telemetry.clearAll();
telemetry.addLine("Place the robot back in its starting position");
telemetry.addLine("Press (Y/Δ) to continue");
telemetry.update();
while (!isStopRequested() && !gamepad1.y) {
idle();
}
while (!isStopRequested() && gamepad1.y) {
idle();
}
telemetry.clearAll();
telemetry.addLine("Running...");
telemetry.update();
double maxPowerTime = DISTANCE / maxVel;
timeSamples.clear();
positionSamples.clear();
powerSamples.clear();
drive.setPoseEstimate(new Pose2d());
drive.setDrivePower(new Pose2d(MAX_POWER, 0.0, 0.0));
startTime = clock.seconds();
while (!isStopRequested()) {
double elapsedTime = clock.seconds() - startTime;
if (elapsedTime > maxPowerTime) {
break;
}
timeSamples.add(elapsedTime);
positionSamples.add(drive.getPoseEstimate().getX());
powerSamples.add(MAX_POWER);
drive.updatePoseEstimate();
}
drive.setDrivePower(new Pose2d(0.0, 0.0, 0.0));
RegressionUtil.AccelResult accelResult = RegressionUtil.fitAccelData(
timeSamples, positionSamples, powerSamples, rampResult,
LoggingUtil.getLogFile(Misc.formatInvariant(
"DriveAccelRegression-%d.csv", System.currentTimeMillis())));
telemetry.clearAll();
telemetry.addLine("Constant power test complete");
telemetry.addLine(Misc.formatInvariant("kA = %.5f (R^2 = %.2f)",
accelResult.kA, accelResult.rSquare));
telemetry.update();
}
while (!isStopRequested()) {
idle();
}
}
}


@ -0,0 +1,52 @@
package org.firstinspires.ftc.teamcode.drive.opmode;
import com.acmerobotics.dashboard.config.Config;
import com.acmerobotics.roadrunner.geometry.Pose2d;
import com.acmerobotics.roadrunner.trajectory.Trajectory;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import org.firstinspires.ftc.teamcode.drive.SampleMecanumDrive;
/*
* Op mode for preliminary tuning of the follower PID coefficients (located in the drive base
* classes). The robot drives back and forth in a straight line indefinitely. Utilization of the
* dashboard is recommended for this tuning routine. To access the dashboard, connect your computer
* to the RC's WiFi network. In your browser, navigate to http://192.168.49.1:8080/dash if you're
* using the RC phone or http://192.168.43.1:8080/dash if you are using the Control Hub. Once
* you've successfully connected, start the program, and your robot will begin moving forward and
* backward. You should observe the target position (green) and your pose estimate (blue) and adjust
* your follower PID coefficients such that you follow the target position as accurately as possible.
* If you are using SampleMecanumDrive, you should be tuning TRANSLATIONAL_PID and HEADING_PID.
* If you are using SampleTankDrive, you should be tuning AXIAL_PID, CROSS_TRACK_PID, and HEADING_PID.
* These coefficients can be tuned live in dashboard.
*
* This opmode is designed as a convenient, coarse tuning for the follower PID coefficients. It
* is recommended that you use the FollowerPIDTuner opmode for further fine tuning.
*/
@Config
@Autonomous(group = "drive")
public class BackAndForth extends LinearOpMode {
public static double DISTANCE = 50;
@Override
public void runOpMode() throws InterruptedException {
SampleMecanumDrive drive = new SampleMecanumDrive(hardwareMap);
Trajectory trajectoryForward = drive.trajectoryBuilder(new Pose2d())
.forward(DISTANCE)
.build();
Trajectory trajectoryBackward = drive.trajectoryBuilder(trajectoryForward.end())
.back(DISTANCE)
.build();
waitForStart();
while (opModeIsActive() && !isStopRequested()) {
drive.followTrajectory(trajectoryForward);
drive.followTrajectory(trajectoryBackward);
}
}
}


@ -0,0 +1,171 @@
package org.firstinspires.ftc.teamcode.drive.opmode;
import static org.firstinspires.ftc.teamcode.drive.DriveConstants.MAX_ACCEL;
import static org.firstinspires.ftc.teamcode.drive.DriveConstants.MAX_VEL;
import static org.firstinspires.ftc.teamcode.drive.DriveConstants.MOTOR_VELO_PID;
import static org.firstinspires.ftc.teamcode.drive.DriveConstants.RUN_USING_ENCODER;
import static org.firstinspires.ftc.teamcode.drive.DriveConstants.kV;
import com.acmerobotics.dashboard.FtcDashboard;
import com.acmerobotics.dashboard.config.Config;
import com.acmerobotics.dashboard.telemetry.MultipleTelemetry;
import com.acmerobotics.roadrunner.geometry.Pose2d;
import com.acmerobotics.roadrunner.profile.MotionProfile;
import com.acmerobotics.roadrunner.profile.MotionProfileGenerator;
import com.acmerobotics.roadrunner.profile.MotionState;
import com.acmerobotics.roadrunner.util.NanoClock;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.util.RobotLog;
import org.firstinspires.ftc.robotcore.external.Telemetry;
import org.firstinspires.ftc.teamcode.drive.SampleMecanumDrive;
import java.util.List;
/*
* This routine is designed to tune the PID coefficients used by the REV Expansion Hubs for closed-
* loop velocity control. Although it may seem unnecessary, tuning these coefficients is just as
* important as the positional parameters. Like the other manual tuning routines, this op mode
* relies heavily upon the dashboard. To access the dashboard, connect your computer to the RC's
* WiFi network. In your browser, navigate to http://192.168.49.1:8080/dash if you're using the RC
* phone or http://192.168.43.1:8080/dash if you are using the Control Hub. Once you've successfully
* connected, start the program, and your robot will begin moving forward and backward according to
* a motion profile. Your job is to graph the velocity errors over time and adjust the PID
* coefficients (note: the tuning variable will not appear until the op mode finishes initializing).
* Once you've found a satisfactory set of gains, add them to the DriveConstants.java file under the
* MOTOR_VELO_PID field.
*
* Recommended tuning process:
*
* 1. Increase kP until any phase lag is eliminated. Concurrently increase kD as necessary to
* mitigate oscillations.
* 2. Add kI (or adjust kF) until the steady state/constant velocity plateaus are reached.
* 3. Back off kP and kD a little until the response is less oscillatory (but without lag).
*
* Pressing Y/Δ (Xbox/PS4) will pause the tuning process and enter driver override, allowing the
* user to reset the position of the bot in the event that it drifts off the path.
* Pressing B/O (Xbox/PS4) will cede control back to the tuning process.
*/
@Config
@Autonomous(group = "drive")
public class DriveVelocityPIDTuner extends LinearOpMode {
public static double DISTANCE = 72; // in
enum Mode {
DRIVER_MODE,
TUNING_MODE
}
private static MotionProfile generateProfile(boolean movingForward) {
MotionState start = new MotionState(movingForward ? 0 : DISTANCE, 0, 0, 0);
MotionState goal = new MotionState(movingForward ? DISTANCE : 0, 0, 0, 0);
return MotionProfileGenerator.generateSimpleMotionProfile(start, goal, MAX_VEL, MAX_ACCEL);
}
@Override
public void runOpMode() {
if (!RUN_USING_ENCODER) {
RobotLog.setGlobalErrorMsg("%s does not need to be run if the built-in motor velocity" +
"PID is not in use", getClass().getSimpleName());
}
Telemetry telemetry = new MultipleTelemetry(this.telemetry, FtcDashboard.getInstance().getTelemetry());
SampleMecanumDrive drive = new SampleMecanumDrive(hardwareMap);
Mode mode = Mode.TUNING_MODE;
double lastKp = MOTOR_VELO_PID.p;
double lastKi = MOTOR_VELO_PID.i;
double lastKd = MOTOR_VELO_PID.d;
double lastKf = MOTOR_VELO_PID.f;
drive.setPIDFCoefficients(DcMotor.RunMode.RUN_USING_ENCODER, MOTOR_VELO_PID);
NanoClock clock = NanoClock.system();
telemetry.addLine("Ready!");
telemetry.update();
telemetry.clearAll();
waitForStart();
if (isStopRequested()) return;
boolean movingForwards = true;
MotionProfile activeProfile = generateProfile(true);
double profileStart = clock.seconds();
while (!isStopRequested()) {
telemetry.addData("mode", mode);
switch (mode) {
case TUNING_MODE:
if (gamepad1.y) {
mode = Mode.DRIVER_MODE;
drive.setMode(DcMotor.RunMode.RUN_WITHOUT_ENCODER);
}
// calculate and set the motor power
double profileTime = clock.seconds() - profileStart;
if (profileTime > activeProfile.duration()) {
// generate a new profile
movingForwards = !movingForwards;
activeProfile = generateProfile(movingForwards);
profileStart = clock.seconds();
}
MotionState motionState = activeProfile.get(profileTime);
double targetPower = kV * motionState.getV();
drive.setDrivePower(new Pose2d(targetPower, 0, 0));
List<Double> velocities = drive.getWheelVelocities();
// update telemetry
telemetry.addData("targetVelocity", motionState.getV());
for (int i = 0; i < velocities.size(); i++) {
telemetry.addData("measuredVelocity" + i, velocities.get(i));
telemetry.addData(
"error" + i,
motionState.getV() - velocities.get(i)
);
}
break;
case DRIVER_MODE:
if (gamepad1.b) {
drive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
mode = Mode.TUNING_MODE;
movingForwards = true;
activeProfile = generateProfile(movingForwards);
profileStart = clock.seconds();
}
drive.setWeightedDrivePower(
new Pose2d(
-gamepad1.left_stick_y,
-gamepad1.left_stick_x,
-gamepad1.right_stick_x
)
);
break;
}
if (lastKp != MOTOR_VELO_PID.p || lastKd != MOTOR_VELO_PID.d
|| lastKi != MOTOR_VELO_PID.i || lastKf != MOTOR_VELO_PID.f) {
drive.setPIDFCoefficients(DcMotor.RunMode.RUN_USING_ENCODER, MOTOR_VELO_PID);
lastKp = MOTOR_VELO_PID.p;
lastKi = MOTOR_VELO_PID.i;
lastKd = MOTOR_VELO_PID.d;
lastKf = MOTOR_VELO_PID.f;
}
telemetry.update();
}
}
}


@ -0,0 +1,55 @@
package org.firstinspires.ftc.teamcode.drive.opmode;
import com.acmerobotics.dashboard.config.Config;
import com.acmerobotics.roadrunner.geometry.Pose2d;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import org.firstinspires.ftc.teamcode.drive.SampleMecanumDrive;
import org.firstinspires.ftc.teamcode.trajectorysequence.TrajectorySequence;
/*
* Op mode for preliminary tuning of the follower PID coefficients (located in the drive base
* classes). The robot drives in a DISTANCE-by-DISTANCE square indefinitely. Utilization of the
* dashboard is recommended for this tuning routine. To access the dashboard, connect your computer
* to the RC's WiFi network. In your browser, navigate to http://192.168.49.1:8080/dash if you're
* using the RC phone or http://192.168.43.1:8080/dash if you are using the Control Hub. Once
* you've successfully connected, start the program, and your robot will begin driving in a square.
* You should observe the target position (green) and your pose estimate (blue) and adjust your
* follower PID coefficients such that you follow the target position as accurately as possible.
* If you are using SampleMecanumDrive, you should be tuning TRANSLATIONAL_PID and HEADING_PID.
* If you are using SampleTankDrive, you should be tuning AXIAL_PID, CROSS_TRACK_PID, and HEADING_PID.
* These coefficients can be tuned live in dashboard.
*/
@Config
@Autonomous(group = "drive")
public class FollowerPIDTuner extends LinearOpMode {
public static double DISTANCE = 48; // in
@Override
public void runOpMode() throws InterruptedException {
SampleMecanumDrive drive = new SampleMecanumDrive(hardwareMap);
Pose2d startPose = new Pose2d(-DISTANCE / 2, -DISTANCE / 2, 0);
drive.setPoseEstimate(startPose);
waitForStart();
if (isStopRequested()) return;
while (!isStopRequested()) {
TrajectorySequence trajSeq = drive.trajectorySequenceBuilder(startPose)
.forward(DISTANCE)
.turn(Math.toRadians(90))
.forward(DISTANCE)
.turn(Math.toRadians(90))
.forward(DISTANCE)
.turn(Math.toRadians(90))
.forward(DISTANCE)
.turn(Math.toRadians(90))
.build();
drive.followTrajectorySequence(trajSeq);
}
}
}


@ -0,0 +1,45 @@
package org.firstinspires.ftc.teamcode.drive.opmode;
import com.acmerobotics.roadrunner.geometry.Pose2d;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.DcMotor;
import org.firstinspires.ftc.teamcode.drive.SampleMecanumDrive;
/**
* This is a simple teleop routine for testing localization. Drive the robot around like a normal
* teleop routine and make sure the robot's estimated pose matches the robot's actual pose (slight
* errors are not out of the ordinary, especially with sudden drive motions). The goal of this
* exercise is to ascertain whether the localizer has been configured properly (note: the pure
* encoder localizer heading may be significantly off if the track width has not been tuned).
*/
@TeleOp(group = "drive")
public class LocalizationTest extends LinearOpMode {
@Override
public void runOpMode() throws InterruptedException {
SampleMecanumDrive drive = new SampleMecanumDrive(hardwareMap);
drive.setMode(DcMotor.RunMode.RUN_WITHOUT_ENCODER);
waitForStart();
while (!isStopRequested()) {
drive.setWeightedDrivePower(
new Pose2d(
-gamepad1.left_stick_y,
-gamepad1.left_stick_x,
-gamepad1.right_stick_x
)
);
drive.update();
Pose2d poseEstimate = drive.getPoseEstimate();
telemetry.addData("x", poseEstimate.getX());
telemetry.addData("y", poseEstimate.getY());
telemetry.addData("heading", poseEstimate.getHeading());
telemetry.update();
}
}
}


@ -0,0 +1,154 @@
package org.firstinspires.ftc.teamcode.drive.opmode;
import static org.firstinspires.ftc.teamcode.drive.DriveConstants.MAX_ACCEL;
import static org.firstinspires.ftc.teamcode.drive.DriveConstants.MAX_VEL;
import static org.firstinspires.ftc.teamcode.drive.DriveConstants.RUN_USING_ENCODER;
import static org.firstinspires.ftc.teamcode.drive.DriveConstants.kA;
import static org.firstinspires.ftc.teamcode.drive.DriveConstants.kStatic;
import static org.firstinspires.ftc.teamcode.drive.DriveConstants.kV;
import com.acmerobotics.dashboard.FtcDashboard;
import com.acmerobotics.dashboard.config.Config;
import com.acmerobotics.dashboard.telemetry.MultipleTelemetry;
import com.acmerobotics.roadrunner.drive.DriveSignal;
import com.acmerobotics.roadrunner.geometry.Pose2d;
import com.acmerobotics.roadrunner.kinematics.Kinematics;
import com.acmerobotics.roadrunner.profile.MotionProfile;
import com.acmerobotics.roadrunner.profile.MotionProfileGenerator;
import com.acmerobotics.roadrunner.profile.MotionState;
import com.acmerobotics.roadrunner.util.NanoClock;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.hardware.VoltageSensor;
import com.qualcomm.robotcore.util.RobotLog;
import org.firstinspires.ftc.robotcore.external.Telemetry;
import org.firstinspires.ftc.teamcode.drive.DriveConstants;
import org.firstinspires.ftc.teamcode.drive.SampleMecanumDrive;
import java.util.Objects;
/*
* This routine is designed to tune the open-loop feedforward coefficients. Although it may seem unnecessary,
* tuning these coefficients is just as important as the positional parameters. Like the other
* manual tuning routines, this op mode relies heavily upon the dashboard. To access the dashboard,
* connect your computer to the RC's WiFi network. In your browser, navigate to
* http://192.168.49.1:8080/dash if you're using the RC phone or http://192.168.43.1:8080/dash if
* you are using the Control Hub. Once you've successfully connected, start the program, and your
* robot will begin moving forward and backward according to a motion profile. Your job is to graph
* the velocity errors over time and adjust the feedforward coefficients. Once you've found a
* satisfactory set of gains, add them to the appropriate fields in the DriveConstants.java file.
*
* Pressing Y/Δ (Xbox/PS4) will pause the tuning process and enter driver override, allowing the
* user to reset the position of the bot in the event that it drifts off the path.
* Pressing B/O (Xbox/PS4) will cede control back to the tuning process.
*/
@Config
@Autonomous(group = "drive")
public class ManualFeedforwardTuner extends LinearOpMode {
public static double DISTANCE = 72; // in
private FtcDashboard dashboard = FtcDashboard.getInstance();
private SampleMecanumDrive drive;
enum Mode {
DRIVER_MODE,
TUNING_MODE
}
private Mode mode;
private static MotionProfile generateProfile(boolean movingForward) {
MotionState start = new MotionState(movingForward ? 0 : DISTANCE, 0, 0, 0);
MotionState goal = new MotionState(movingForward ? DISTANCE : 0, 0, 0, 0);
return MotionProfileGenerator.generateSimpleMotionProfile(start, goal, MAX_VEL, MAX_ACCEL);
}
@Override
public void runOpMode() {
if (RUN_USING_ENCODER) {
RobotLog.setGlobalErrorMsg("Feedforward constants usually don't need to be tuned " +
"when using the built-in drive motor velocity PID.");
}
Telemetry telemetry = new MultipleTelemetry(this.telemetry, dashboard.getTelemetry());
drive = new SampleMecanumDrive(hardwareMap);
final VoltageSensor voltageSensor = hardwareMap.voltageSensor.iterator().next();
mode = Mode.TUNING_MODE;
NanoClock clock = NanoClock.system();
telemetry.addLine("Ready!");
telemetry.update();
telemetry.clearAll();
waitForStart();
if (isStopRequested()) return;
boolean movingForwards = true;
MotionProfile activeProfile = generateProfile(true);
double profileStart = clock.seconds();
while (!isStopRequested()) {
telemetry.addData("mode", mode);
switch (mode) {
case TUNING_MODE:
if (gamepad1.y) {
mode = Mode.DRIVER_MODE;
}
// calculate and set the motor power
double profileTime = clock.seconds() - profileStart;
if (profileTime > activeProfile.duration()) {
// generate a new profile
movingForwards = !movingForwards;
activeProfile = generateProfile(movingForwards);
profileStart = clock.seconds();
}
MotionState motionState = activeProfile.get(profileTime);
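// feedforward model evaluated by the call below (Roadrunner's standard form):
// basePower = kV * v + kA * a, with kStatic added in the direction of basePower when it is nonzero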
double targetPower = Kinematics.calculateMotorFeedforward(motionState.getV(), motionState.getA(), kV, kA, kStatic);
final double NOMINAL_VOLTAGE = 12.0;
final double voltage = voltageSensor.getVoltage();
drive.setDrivePower(new Pose2d(NOMINAL_VOLTAGE / voltage * targetPower, 0, 0));
drive.updatePoseEstimate();
Pose2d poseVelo = Objects.requireNonNull(drive.getPoseVelocity(), "poseVelocity() must not be null. Ensure that the getWheelVelocities() method has been overridden in your localizer.");
double currentVelo = poseVelo.getX();
// update telemetry
telemetry.addData("targetVelocity", motionState.getV());
telemetry.addData("measuredVelocity", currentVelo);
telemetry.addData("error", motionState.getV() - currentVelo);
break;
case DRIVER_MODE:
if (gamepad1.b) {
mode = Mode.TUNING_MODE;
movingForwards = true;
activeProfile = generateProfile(movingForwards);
profileStart = clock.seconds();
}
drive.setWeightedDrivePower(
new Pose2d(
-gamepad1.left_stick_y,
-gamepad1.left_stick_x,
-gamepad1.right_stick_x
)
);
break;
}
telemetry.update();
}
}
}


@ -0,0 +1,73 @@
package org.firstinspires.ftc.teamcode.drive.opmode;
import com.acmerobotics.dashboard.FtcDashboard;
import com.acmerobotics.dashboard.config.Config;
import com.acmerobotics.dashboard.telemetry.MultipleTelemetry;
import com.acmerobotics.roadrunner.geometry.Pose2d;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.util.ElapsedTime;
import org.firstinspires.ftc.robotcore.external.Telemetry;
import org.firstinspires.ftc.teamcode.drive.SampleMecanumDrive;
import java.util.Objects;
/**
* This routine is designed to calculate the maximum angular velocity your bot can achieve under load.
* <p>
* Upon pressing start, your bot will turn at max power for RUNTIME seconds.
* <p>
* Further fine tuning of MAX_ANG_VEL may be desired.
*/
@Config
@Autonomous(group = "drive")
public class MaxAngularVeloTuner extends LinearOpMode {
public static double RUNTIME = 4.0;
private ElapsedTime timer;
private double maxAngVelocity = 0.0;
@Override
public void runOpMode() throws InterruptedException {
SampleMecanumDrive drive = new SampleMecanumDrive(hardwareMap);
drive.setMode(DcMotor.RunMode.RUN_WITHOUT_ENCODER);
Telemetry telemetry = new MultipleTelemetry(this.telemetry, FtcDashboard.getInstance().getTelemetry());
telemetry.addLine("Your bot will turn at full speed for " + RUNTIME + " seconds.");
telemetry.addLine("Please ensure you have enough space cleared.");
telemetry.addLine("");
telemetry.addLine("Press start when ready.");
telemetry.update();
waitForStart();
telemetry.clearAll();
telemetry.update();
drive.setDrivePower(new Pose2d(0, 0, 1));
timer = new ElapsedTime();
while (!isStopRequested() && timer.seconds() < RUNTIME) {
drive.updatePoseEstimate();
Pose2d poseVelo = Objects.requireNonNull(drive.getPoseVelocity(), "poseVelocity() must not be null. Ensure that the getWheelVelocities() method has been overridden in your localizer.");
maxAngVelocity = Math.max(poseVelo.getHeading(), maxAngVelocity);
}
drive.setDrivePower(new Pose2d());
telemetry.addData("Max Angular Velocity (rad)", maxAngVelocity);
telemetry.addData("Max Angular Velocity (deg)", Math.toDegrees(maxAngVelocity));
telemetry.addData("Max Recommended Angular Velocity (rad)", maxAngVelocity * 0.8);
telemetry.addData("Max Recommended Angular Velocity (deg)", Math.toDegrees(maxAngVelocity * 0.8));
telemetry.update();
while (!isStopRequested()) idle();
}
}


@ -0,0 +1,84 @@
package org.firstinspires.ftc.teamcode.drive.opmode;
import com.acmerobotics.dashboard.FtcDashboard;
import com.acmerobotics.dashboard.config.Config;
import com.acmerobotics.dashboard.telemetry.MultipleTelemetry;
import com.acmerobotics.roadrunner.geometry.Pose2d;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.hardware.VoltageSensor;
import com.qualcomm.robotcore.util.ElapsedTime;
import org.firstinspires.ftc.robotcore.external.Telemetry;
import org.firstinspires.ftc.teamcode.drive.DriveConstants;
import org.firstinspires.ftc.teamcode.drive.SampleMecanumDrive;
import java.util.Objects;
/**
* This routine is designed to calculate the maximum velocity your bot can achieve under load. It
* will also calculate the effective kF value for your velocity PID.
* <p>
* Upon pressing start, your bot will run at max power for RUNTIME seconds.
* <p>
* Further fine tuning of kF may be desired.
*/
@Config
@Autonomous(group = "drive")
public class MaxVelocityTuner extends LinearOpMode {
public static double RUNTIME = 2.0;
private ElapsedTime timer;
private double maxVelocity = 0.0;
private VoltageSensor batteryVoltageSensor;
@Override
public void runOpMode() throws InterruptedException {
SampleMecanumDrive drive = new SampleMecanumDrive(hardwareMap);
drive.setMode(DcMotor.RunMode.RUN_WITHOUT_ENCODER);
batteryVoltageSensor = hardwareMap.voltageSensor.iterator().next();
Telemetry telemetry = new MultipleTelemetry(this.telemetry, FtcDashboard.getInstance().getTelemetry());
telemetry.addLine("Your bot will go at full speed for " + RUNTIME + " seconds.");
telemetry.addLine("Please ensure you have enough space cleared.");
telemetry.addLine("");
telemetry.addLine("Press start when ready.");
telemetry.update();
waitForStart();
telemetry.clearAll();
telemetry.update();
drive.setDrivePower(new Pose2d(1, 0, 0));
timer = new ElapsedTime();
while (!isStopRequested() && timer.seconds() < RUNTIME) {
drive.updatePoseEstimate();
Pose2d poseVelo = Objects.requireNonNull(drive.getPoseVelocity(), "poseVelocity() must not be null. Ensure that the getWheelVelocities() method has been overridden in your localizer.");
maxVelocity = Math.max(poseVelo.vec().norm(), maxVelocity);
}
drive.setDrivePower(new Pose2d());
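// Assumption: getMotorVelocityF is the stock quickstart helper that converts the measured top
// speed into the REV hub's velocity PIDF feedforward term, i.e. F = 32767 / maxVelocityTicksPerSec.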
double effectiveKf = DriveConstants.getMotorVelocityF(veloInchesToTicks(maxVelocity));
telemetry.addData("Max Velocity", maxVelocity);
telemetry.addData("Max Recommended Velocity", maxVelocity * 0.8);
telemetry.addData("Voltage Compensated kF", effectiveKf * batteryVoltageSensor.getVoltage() / 12);
telemetry.update();
while (!isStopRequested() && opModeIsActive()) idle();
}
private double veloInchesToTicks(double inchesPerSec) {
return inchesPerSec / (2 * Math.PI * DriveConstants.WHEEL_RADIUS) / DriveConstants.GEAR_RATIO * DriveConstants.TICKS_PER_REV;
}
}


@ -0,0 +1,93 @@
package org.firstinspires.ftc.teamcode.drive.opmode;
import com.acmerobotics.dashboard.FtcDashboard;
import com.acmerobotics.dashboard.config.Config;
import com.acmerobotics.dashboard.telemetry.MultipleTelemetry;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import org.firstinspires.ftc.robotcore.external.Telemetry;
import org.firstinspires.ftc.teamcode.drive.SampleMecanumDrive;
/**
* This is a simple teleop routine for debugging your motor configuration.
* Pressing each of the buttons will power its respective motor.
*
* Button Mappings:
*
* Xbox/PS4 Button - Motor
* X / ▢ - Front Left
* Y / Δ - Front Right
* B / O - Rear Right
* A / X - Rear Left
* The buttons are mapped to match the wheels spatially if you
* were to rotate the gamepad 45°. x/square is the front left
* ________ and each button corresponds to the wheel as you go clockwise
* / ______ \
* ------------.-' _ '-..+ Front of Bot
* / _ ( Y ) _ \ ^
* | ( X ) _ ( B ) | Front Left \ Front Right
* ___ '. ( A ) /| Wheel \ Wheel
* .' '. '-._____.-' .' (x/▢) \ (Y/Δ)
* | | | \
* '.___.' '. | Rear Left \ Rear Right
* '. / Wheel \ Wheel
* \. .' (A/X) \ (B/O)
* \________/
*
* Comment out or remove the @Disabled tag below to use this opmode.
*/
@Disabled
@Config
@TeleOp(group = "drive")
public class MotorDirectionDebugger extends LinearOpMode {
public static double MOTOR_POWER = 0.7;
@Override
public void runOpMode() throws InterruptedException {
Telemetry telemetry = new MultipleTelemetry(this.telemetry, FtcDashboard.getInstance().getTelemetry());
SampleMecanumDrive drive = new SampleMecanumDrive(hardwareMap);
telemetry.addLine("Press play to begin the debugging opmode");
telemetry.update();
waitForStart();
if (isStopRequested()) return;
telemetry.clearAll();
telemetry.setDisplayFormat(Telemetry.DisplayFormat.HTML);
while (!isStopRequested()) {
telemetry.addLine("Press each button to turn on its respective motor");
telemetry.addLine();
telemetry.addLine("<font face=\"monospace\">Xbox/PS4 Button - Motor</font>");
telemetry.addLine("<font face=\"monospace\">&nbsp;&nbsp;X / ▢&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;- Front Left</font>");
telemetry.addLine("<font face=\"monospace\">&nbsp;&nbsp;Y / Δ&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;- Front Right</font>");
telemetry.addLine("<font face=\"monospace\">&nbsp;&nbsp;B / O&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;- Rear&nbsp;&nbsp;Right</font>");
telemetry.addLine("<font face=\"monospace\">&nbsp;&nbsp;A / X&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;- Rear&nbsp;&nbsp;Left</font>");
telemetry.addLine();
if(gamepad1.x) {
drive.setMotorPowers(MOTOR_POWER, 0, 0, 0);
telemetry.addLine("Running Motor: Front Left");
} else if(gamepad1.y) {
drive.setMotorPowers(0, 0, 0, MOTOR_POWER);
telemetry.addLine("Running Motor: Front Right");
} else if(gamepad1.b) {
drive.setMotorPowers(0, 0, MOTOR_POWER, 0);
telemetry.addLine("Running Motor: Rear Right");
} else if(gamepad1.a) {
drive.setMotorPowers(0, MOTOR_POWER, 0, 0);
telemetry.addLine("Running Motor: Rear Left");
} else {
drive.setMotorPowers(0, 0, 0, 0);
telemetry.addLine("Running Motor: None");
}
telemetry.update();
}
}
}


@ -0,0 +1,38 @@
package org.firstinspires.ftc.teamcode.drive.opmode;
import com.acmerobotics.roadrunner.geometry.Pose2d;
import com.acmerobotics.roadrunner.geometry.Vector2d;
import com.acmerobotics.roadrunner.trajectory.Trajectory;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import org.firstinspires.ftc.teamcode.drive.SampleMecanumDrive;
/*
* This is an example of a more complex path to really test the tuning.
*/
@Autonomous(group = "drive")
public class SplineTest extends LinearOpMode {
@Override
public void runOpMode() throws InterruptedException {
SampleMecanumDrive drive = new SampleMecanumDrive(hardwareMap);
waitForStart();
if (isStopRequested()) return;
Trajectory traj = drive.trajectoryBuilder(new Pose2d())
.splineTo(new Vector2d(30, 30), 0)
.build();
drive.followTrajectory(traj);
sleep(2000);
drive.followTrajectory(
drive.trajectoryBuilder(traj.end(), true)
.splineTo(new Vector2d(0, 0), Math.toRadians(180))
.build()
);
}
}


@ -0,0 +1,46 @@
package org.firstinspires.ftc.teamcode.drive.opmode;
import com.acmerobotics.dashboard.FtcDashboard;
import com.acmerobotics.dashboard.config.Config;
import com.acmerobotics.dashboard.telemetry.MultipleTelemetry;
import com.acmerobotics.roadrunner.geometry.Pose2d;
import com.acmerobotics.roadrunner.trajectory.Trajectory;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import org.firstinspires.ftc.robotcore.external.Telemetry;
import org.firstinspires.ftc.teamcode.drive.SampleMecanumDrive;
/*
* This is a simple routine to test translational drive capabilities.
*/
@Config
@Autonomous(group = "drive")
public class StrafeTest extends LinearOpMode {
public static double DISTANCE = 60; // in
@Override
public void runOpMode() throws InterruptedException {
Telemetry telemetry = new MultipleTelemetry(this.telemetry, FtcDashboard.getInstance().getTelemetry());
SampleMecanumDrive drive = new SampleMecanumDrive(hardwareMap);
Trajectory trajectory = drive.trajectoryBuilder(new Pose2d())
.strafeRight(DISTANCE)
.build();
waitForStart();
if (isStopRequested()) return;
drive.followTrajectory(trajectory);
Pose2d poseEstimate = drive.getPoseEstimate();
telemetry.addData("finalX", poseEstimate.getX());
telemetry.addData("finalY", poseEstimate.getY());
telemetry.addData("finalHeading", poseEstimate.getHeading());
telemetry.update();
while (!isStopRequested() && opModeIsActive()) idle();
}
}


@ -0,0 +1,46 @@
package org.firstinspires.ftc.teamcode.drive.opmode;
import com.acmerobotics.dashboard.FtcDashboard;
import com.acmerobotics.dashboard.config.Config;
import com.acmerobotics.dashboard.telemetry.MultipleTelemetry;
import com.acmerobotics.roadrunner.geometry.Pose2d;
import com.acmerobotics.roadrunner.trajectory.Trajectory;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import org.firstinspires.ftc.robotcore.external.Telemetry;
import org.firstinspires.ftc.teamcode.drive.SampleMecanumDrive;
/*
* This is a simple routine to test translational drive capabilities.
*/
@Config
@Autonomous(group = "drive")
public class StraightTest extends LinearOpMode {
public static double DISTANCE = 60; // in
@Override
public void runOpMode() throws InterruptedException {
Telemetry telemetry = new MultipleTelemetry(this.telemetry, FtcDashboard.getInstance().getTelemetry());
SampleMecanumDrive drive = new SampleMecanumDrive(hardwareMap);
Trajectory trajectory = drive.trajectoryBuilder(new Pose2d())
.forward(DISTANCE)
.build();
waitForStart();
if (isStopRequested()) return;
drive.followTrajectory(trajectory);
Pose2d poseEstimate = drive.getPoseEstimate();
telemetry.addData("finalX", poseEstimate.getX());
telemetry.addData("finalY", poseEstimate.getY());
telemetry.addData("finalHeading", poseEstimate.getHeading());
telemetry.update();
while (!isStopRequested() && opModeIsActive()) idle();
}
}


@ -0,0 +1,88 @@
package org.firstinspires.ftc.teamcode.drive.opmode;
import com.acmerobotics.dashboard.FtcDashboard;
import com.acmerobotics.dashboard.config.Config;
import com.acmerobotics.dashboard.telemetry.MultipleTelemetry;
import com.acmerobotics.roadrunner.geometry.Pose2d;
import com.acmerobotics.roadrunner.util.Angle;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.util.MovingStatistics;
import org.firstinspires.ftc.robotcore.external.Telemetry;
import org.firstinspires.ftc.robotcore.internal.system.Misc;
import org.firstinspires.ftc.teamcode.drive.DriveConstants;
import org.firstinspires.ftc.teamcode.drive.SampleMecanumDrive;
/*
* This routine determines the effective track width. The procedure works by executing a point turn
* with a given angle and measuring the difference between that angle and the actual angle (as
* indicated by an external IMU/gyro, track wheels, or some other localizer). The quotient
* given angle / actual angle gives a multiplicative adjustment to the estimated track width
* (effective track width = estimated track width * given angle / actual angle). The routine repeats
* this procedure a few times and averages the values for additional accuracy. Note: a relatively
* accurate track width estimate is important or else the angular constraints will be thrown off.
*/
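/*
 * Worked example (illustrative numbers): with TRACK_WIDTH estimated at 15 in, commanding a
 * 180 deg turn that actually accumulates only 160 deg of heading gives an effective track
 * width of 15 * 180 / 160 = 16.875 in.
 */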
@Config
@Autonomous(group = "drive")
public class TrackWidthTuner extends LinearOpMode {
public static double ANGLE = 180; // deg
public static int NUM_TRIALS = 5;
public static int DELAY = 1000; // ms
@Override
public void runOpMode() throws InterruptedException {
Telemetry telemetry = new MultipleTelemetry(this.telemetry, FtcDashboard.getInstance().getTelemetry());
SampleMecanumDrive drive = new SampleMecanumDrive(hardwareMap);
// TODO: if you haven't already, set the localizer to something that doesn't depend on
// drive encoders for computing the heading
telemetry.addLine("Press play to begin the track width tuner routine");
telemetry.addLine("Make sure your robot has enough clearance to turn smoothly");
telemetry.update();
waitForStart();
if (isStopRequested()) return;
telemetry.clearAll();
telemetry.addLine("Running...");
telemetry.update();
MovingStatistics trackWidthStats = new MovingStatistics(NUM_TRIALS);
for (int i = 0; i < NUM_TRIALS; i++) {
drive.setPoseEstimate(new Pose2d());
// it is important to handle heading wraparounds
double headingAccumulator = 0;
double lastHeading = 0;
drive.turnAsync(Math.toRadians(ANGLE));
while (!isStopRequested() && drive.isBusy()) {
double heading = drive.getPoseEstimate().getHeading();
headingAccumulator += Angle.normDelta(heading - lastHeading);
lastHeading = heading;
drive.update();
}
double trackWidth = DriveConstants.TRACK_WIDTH * Math.toRadians(ANGLE) / headingAccumulator;
trackWidthStats.add(trackWidth);
sleep(DELAY);
}
telemetry.clearAll();
telemetry.addLine("Tuning complete");
telemetry.addLine(Misc.formatInvariant("Effective track width = %.2f (SE = %.3f)",
trackWidthStats.getMean(),
trackWidthStats.getStandardDeviation() / Math.sqrt(NUM_TRIALS)));
telemetry.update();
while (!isStopRequested()) {
idle();
}
}
}


@ -0,0 +1,104 @@
package org.firstinspires.ftc.teamcode.drive.opmode;
import com.acmerobotics.dashboard.FtcDashboard;
import com.acmerobotics.dashboard.config.Config;
import com.acmerobotics.dashboard.telemetry.MultipleTelemetry;
import com.acmerobotics.roadrunner.geometry.Pose2d;
import com.acmerobotics.roadrunner.util.Angle;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.util.MovingStatistics;
import com.qualcomm.robotcore.util.RobotLog;
import org.firstinspires.ftc.robotcore.external.Telemetry;
import org.firstinspires.ftc.robotcore.internal.system.Misc;
import org.firstinspires.ftc.teamcode.drive.SampleMecanumDrive;
import org.firstinspires.ftc.teamcode.drive.StandardTrackingWheelLocalizer;
/**
* This routine determines the effective forward offset for the lateral tracking wheel.
* The procedure executes a point turn at a given angle for a certain number of trials,
* along with a specified delay in milliseconds. The purpose of this is to track the
* change in the y position during the turn. The offset, or distance, of the lateral tracking
* wheel from the center of rotation allows the wheel to spin during a point turn, leading
* to an incorrect measurement for the y position. This creates an arc around
* the center of rotation with an arc length of change in y and a radius equal to the forward
* offset. We can compute this offset by calculating (change in y position) / (change in heading)
* which returns the radius if the angle (change in heading) is in radians. This is based
* on the arc length formula of length = theta * radius.
*
* To run this routine, simply adjust the desired angle and specify the number of trials
* and the desired delay. Then, run the procedure. Once it finishes, it will print the
* average of all the calculated forward offsets derived from the calculation. This calculated
* forward offset is then added onto the current forward offset to produce an overall estimate
* for the forward offset. You can run this procedure as many times as necessary until a
* satisfactory result is produced.
*/
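/*
 * Worked example (illustrative numbers): if FORWARD_OFFSET is currently 4 in and a 180 deg
 * (pi rad) turn leaves a residual y reading of 0.8 in, the estimate becomes
 * 4 + 0.8 / pi = ~4.25 in.
 */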
@Config
@Autonomous(group="drive")
public class TrackingWheelForwardOffsetTuner extends LinearOpMode {
public static double ANGLE = 180; // deg
public static int NUM_TRIALS = 5;
public static int DELAY = 1000; // ms
@Override
public void runOpMode() throws InterruptedException {
Telemetry telemetry = new MultipleTelemetry(this.telemetry, FtcDashboard.getInstance().getTelemetry());
SampleMecanumDrive drive = new SampleMecanumDrive(hardwareMap);
if (!(drive.getLocalizer() instanceof StandardTrackingWheelLocalizer)) {
RobotLog.setGlobalErrorMsg("StandardTrackingWheelLocalizer is not being set in the "
+ "drive class. Ensure that \"setLocalizer(new StandardTrackingWheelLocalizer"
+ "(hardwareMap));\" is called in SampleMecanumDrive.java");
}
telemetry.addLine("Press play to begin the forward offset tuner");
telemetry.addLine("Make sure your robot has enough clearance to turn smoothly");
telemetry.update();
waitForStart();
if (isStopRequested()) return;
telemetry.clearAll();
telemetry.addLine("Running...");
telemetry.update();
MovingStatistics forwardOffsetStats = new MovingStatistics(NUM_TRIALS);
for (int i = 0; i < NUM_TRIALS; i++) {
drive.setPoseEstimate(new Pose2d());
// it is important to handle heading wraparounds
double headingAccumulator = 0;
double lastHeading = 0;
drive.turnAsync(Math.toRadians(ANGLE));
while (!isStopRequested() && drive.isBusy()) {
double heading = drive.getPoseEstimate().getHeading();
headingAccumulator += Angle.normDelta(heading - lastHeading);
lastHeading = heading;
drive.update();
}
double forwardOffset = StandardTrackingWheelLocalizer.FORWARD_OFFSET +
drive.getPoseEstimate().getY() / headingAccumulator;
forwardOffsetStats.add(forwardOffset);
sleep(DELAY);
}
telemetry.clearAll();
telemetry.addLine("Tuning complete");
telemetry.addLine(Misc.formatInvariant("Effective forward offset = %.2f (SE = %.3f)",
forwardOffsetStats.getMean(),
forwardOffsetStats.getStandardDeviation() / Math.sqrt(NUM_TRIALS)));
telemetry.update();
while (!isStopRequested()) {
idle();
}
}
}


@ -0,0 +1,130 @@
package org.firstinspires.ftc.teamcode.drive.opmode;
import com.acmerobotics.dashboard.config.Config;
import com.acmerobotics.roadrunner.geometry.Pose2d;
import com.acmerobotics.roadrunner.util.Angle;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.util.RobotLog;
import org.firstinspires.ftc.teamcode.drive.SampleMecanumDrive;
import org.firstinspires.ftc.teamcode.drive.StandardTrackingWheelLocalizer;
/**
* Opmode designed to assist the user in tuning the `StandardTrackingWheelLocalizer`'s
* LATERAL_DISTANCE value. The LATERAL_DISTANCE is the center-to-center distance of the parallel
* wheels.
*
* Tuning Routine:
*
* 1. Set the LATERAL_DISTANCE value in StandardTrackingWheelLocalizer.java to the physical
* measured value. This need only be an estimated value as you will be tuning it anyway.
*
* 2. Make a mark on the bot (with a piece of tape or sharpie or however you wish) and make a
* similar mark on the ground directly below the indicator on your bot. This will be your reference point to
* ensure you've turned exactly 360°.
*
* 3. Although not entirely necessary, having the bot's pose drawn in dashboard does help
* identify discrepancies in the LATERAL_DISTANCE value. To access the dashboard,
* connect your computer to the RC's WiFi network. In your browser, navigate to
* http://192.168.49.1:8080/dash if you're using the RC phone or http://192.168.43.1:8080/dash
* if you are using the Control Hub.
* Ensure the field is showing (select the field view in top right of the page).
*
* 4. Press play to begin the tuning routine.
*
* 5. Use the right joystick on gamepad 1 to turn the bot counterclockwise.
*
* 6. Spin the bot 10 times, counterclockwise. Make sure to keep track of these turns.
*
* 7. Once the bot has finished spinning 10 times, press Y/△ to finish the routine. The indicators
* on the bot and on the ground you created earlier should be lined up.
*
* 8. Your effective LATERAL_DISTANCE will be given. Stick this value into your
* StandardTrackingWheelLocalizer.java class.
*
* 9. If this value is incorrect, run the routine again while adjusting the LATERAL_DISTANCE value
* yourself. Read the heading output and follow the advice stated in the note below to manually
* nudge the values yourself.
*
* Note:
* It helps to pay attention to how the pose on the field is drawn in dashboard. A blue circle with
* a line from the circumference to the center should be present, representing the bot. The line
* indicates forward. If your LATERAL_DISTANCE value is tuned correctly, the pose drawn in
* dashboard should keep track with the pose of your actual bot. If the drawn bot turns slower than
* the actual bot, the LATERAL_DISTANCE should be decreased. If the drawn bot turns faster than the
* actual bot, the LATERAL_DISTANCE should be increased.
*
* If your drawn bot oscillates around a point in dashboard, don't worry. This is because the
* position of the perpendicular wheel isn't perfectly set and causes a discrepancy in the
* effective center of rotation. You can ignore this effect. The center of rotation will be offset
* slightly but your heading will still be fine. This does not affect your overall tracking
* precision. The heading should still line up.
*/
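/*
 * Worked example (illustrative numbers): if the localizer reports a total of 3700 deg after
 * NUM_TURNS = 10 real turns (3600 deg) with LATERAL_DISTANCE = 10 in, the effective value is
 * (3700 / 3600) * 10 = ~10.28 in.
 */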
@Config
@TeleOp(group = "drive")
public class TrackingWheelLateralDistanceTuner extends LinearOpMode {
public static int NUM_TURNS = 10;
@Override
public void runOpMode() throws InterruptedException {
SampleMecanumDrive drive = new SampleMecanumDrive(hardwareMap);
if (!(drive.getLocalizer() instanceof StandardTrackingWheelLocalizer)) {
RobotLog.setGlobalErrorMsg("StandardTrackingWheelLocalizer is not being set in the "
+ "drive class. Ensure that \"setLocalizer(new StandardTrackingWheelLocalizer"
+ "(hardwareMap));\" is called in SampleMecanumDrive.java");
}
telemetry.addLine("Prior to beginning the routine, please read the directions "
+ "located in the comments of the opmode file.");
telemetry.addLine("Press play to begin the tuning routine.");
telemetry.addLine("");
telemetry.addLine("Press Y/△ to stop the routine.");
telemetry.update();
waitForStart();
if (isStopRequested()) return;
telemetry.clearAll();
telemetry.update();
double headingAccumulator = 0;
double lastHeading = 0;
boolean tuningFinished = false;
while (!isStopRequested() && !tuningFinished) {
Pose2d vel = new Pose2d(0, 0, -gamepad1.right_stick_x);
drive.setDrivePower(vel);
drive.update();
double heading = drive.getPoseEstimate().getHeading();
double deltaHeading = heading - lastHeading;
headingAccumulator += Angle.normDelta(deltaHeading);
lastHeading = heading;
telemetry.clearAll();
telemetry.addLine("Total Heading (deg): " + Math.toDegrees(headingAccumulator));
telemetry.addLine("Raw Heading (deg): " + Math.toDegrees(heading));
telemetry.addLine();
telemetry.addLine("Press Y/△ to conclude routine");
telemetry.update();
if (gamepad1.y)
tuningFinished = true;
}
telemetry.clearAll();
telemetry.addLine("Localizer's total heading: " + Math.toDegrees(headingAccumulator) + "°");
telemetry.addLine("Effective LATERAL_DISTANCE: " +
(headingAccumulator / (NUM_TURNS * Math.PI * 2)) * StandardTrackingWheelLocalizer.LATERAL_DISTANCE);
telemetry.update();
while (!isStopRequested()) idle();
}
}

View File

@ -0,0 +1,27 @@
package org.firstinspires.ftc.teamcode.drive.opmode;
import com.acmerobotics.dashboard.config.Config;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import org.firstinspires.ftc.teamcode.drive.SampleMecanumDrive;
/*
* This is a simple routine to test turning capabilities.
*/
@Config
@Autonomous(group = "drive")
public class TurnTest extends LinearOpMode {
public static double ANGLE = 90; // deg
@Override
public void runOpMode() throws InterruptedException {
SampleMecanumDrive drive = new SampleMecanumDrive(hardwareMap);
waitForStart();
if (isStopRequested()) return;
drive.turn(Math.toRadians(ANGLE));
}
}

View File

@ -0,0 +1,154 @@
package org.firstinspires.ftc.teamcode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.hardware.DcMotorSimple;
import com.qualcomm.robotcore.hardware.Gamepad;
import com.qualcomm.robotcore.util.ElapsedTime;
@TeleOp(name = "ChassisControl")
public class ChassisControl extends OpMode {
public double axial;
public double lateral;
public double yaw;
DcMotor frontRight;
DcMotor backRight;
DcMotor frontLeft;
DcMotor backLeft;
public ElapsedTime runtime = new ElapsedTime();
/**
* Sleeps for the given number of milliseconds.
* @param millis milliseconds to sleep
*/
public void sleepmillis(long millis) {
try {
Thread.sleep(millis);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}
/**
* stops all drive motors
*/
public void off() {
frontRight.setPower(0);
backRight.setPower(0);
frontLeft.setPower(0);
backLeft.setPower(0);
}
/**
* User defined init method
* This method will be called once when the INIT button is pressed.
*/
public void init() {
telemetry.addData("Status","In Init()");
telemetry.update();
frontRight = hardwareMap.dcMotor.get("Drive front rt");
backRight = hardwareMap.dcMotor.get("Drive back rt");
frontLeft = hardwareMap.dcMotor.get("Drive front lt");
backLeft = hardwareMap.dcMotor.get("Drive back lt");
}
/**
* User defined init_loop method
* This method will be called repeatedly when the INIT button is pressed.
* This method is optional. By default this method takes no action.
*/
public void init_loop(){
// Wait for the game to start (driver presses PLAY)
telemetry.addData("Status", "Initialized");
telemetry.update();
}
/**
* User defined start method.
* This method will be called once when the PLAY button is first pressed.
* This method is optional. By default this method takes no action. Example usage: starting another thread.
*/
public void start() {
}
/**
* User defined stop method
* This method will be called when this op mode is first disabled.
* The stop method is optional. By default this method takes no action.
*/
public void stop(){
}
double num = 1;
String speed = "";
/**
* User defined loop method.
* This method will be called repeatedly in a loop while this op mode is running.
*/
public void loop() {
frontLeft.setDirection(DcMotor.Direction.REVERSE);
backLeft.setDirection(DcMotor.Direction.REVERSE);
frontRight.setDirection(DcMotor.Direction.FORWARD);
backRight.setDirection(DcMotor.Direction.REVERSE);
if(gamepad1.a){
num = 3;
speed = "slow";
}
if(gamepad1.b){
num = 2.5;
speed = "medium";
}
if(gamepad1.y){
num = 2;
speed = "fast";
}
if(gamepad1.x){
num = 1.5;
speed = "Ludicrous";
}
if(gamepad1.x && gamepad1.y){
num = 1;
speed = "plaid";
}
axial = -gamepad1.left_stick_y/num; // Note: pushing stick forward gives negative value
lateral = gamepad1.left_stick_x/num;
yaw = gamepad1.right_stick_x/(num+0.5);
// Combine the joystick requests for each axis-motion to determine each wheel's power.
// Set up a variable for each drive wheel to save the power level for telemetry.
double leftFrontPower = axial + lateral + yaw;
double rightFrontPower = axial - lateral - yaw;
double leftBackPower = axial - lateral + yaw;
double rightBackPower = axial + lateral - yaw;
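// Worked example (illustrative numbers): full forward (axial = 1) plus a half
// right strafe (lateral = 0.5) with no yaw gives powers 1.5, 0.5, 0.5, 1.5 for
// LF, RF, LB, RB; the normalization below scales each by 1/1.5 so no motor
// exceeds full power while the ratios (and thus the motion) are preserved.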
// Normalize the values so no wheel power exceeds 100%
// This ensures that the robot maintains the desired motion.
double max = Math.max(Math.abs(leftFrontPower), Math.abs(rightFrontPower));
max = Math.max(max, Math.abs(leftBackPower));
max = Math.max(max, Math.abs(rightBackPower));
if (max > 1.0) {
leftFrontPower /= max;
rightFrontPower /= max;
leftBackPower /= max;
rightBackPower /= max;
}
frontLeft.setPower(leftFrontPower);
frontRight.setPower(rightFrontPower);
backLeft.setPower(leftBackPower);
backRight.setPower(rightBackPower);
// Show the elapsed game time and wheel power
telemetry.addData("Status", "Run Time: " + runtime.toString());
telemetry.addData("Front left, Right", "%4.2f, %4.2f", leftFrontPower, rightFrontPower);
telemetry.addData("Back left, Right", "%4.2f, %4.2f", leftBackPower, rightBackPower);
telemetry.addData("Speed", speed);
telemetry.update();
}
}

View File

@ -0,0 +1,51 @@
package org.firstinspires.ftc.teamcode;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import com.qualcomm.robotcore.hardware.DcMotor;
@Autonomous(name="Motor_test")
public class Motor_Test extends OpMode {
DcMotor hwMotorDriveFrontLeft;
DcMotor hwMotorDriveFrontRight;
DcMotor hwMotorDriveBackLeft;
DcMotor hwMotorDriveBackRight;
public void init() {
hwMotorDriveFrontLeft = hardwareMap.dcMotor.get("Drive front lt");
hwMotorDriveFrontRight = hardwareMap.dcMotor.get("Drive front rt");
hwMotorDriveBackLeft = hardwareMap.dcMotor.get("Drive back lt");
hwMotorDriveBackRight = hardwareMap.dcMotor.get("Drive back rt");
}
public void sleepSec(int iSecs){
try {
Thread.sleep(iSecs*1000);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
}
public void loop() {
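// Note: the blocking sleeps below stall the op-mode loop; acceptable for a quick
// bench test of each motor in turn, but avoid this pattern in drive code.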
hwMotorDriveFrontLeft.setPower(1);
sleepSec(1);
hwMotorDriveFrontRight.setPower(1);
sleepSec(1);
hwMotorDriveBackLeft.setPower(1);
sleepSec(1);
hwMotorDriveBackRight.setPower(1);
sleepSec(1);
hwMotorDriveFrontLeft.setPower(0);
hwMotorDriveFrontRight.setPower(0);
hwMotorDriveBackLeft.setPower(0);
hwMotorDriveBackRight.setPower(0);
sleepSec(10);
}
}

View File

@ -0,0 +1,4 @@
package org.firstinspires.ftc.teamcode.trajectorysequence;
public class EmptySequenceException extends RuntimeException { }

View File

@ -0,0 +1,44 @@
package org.firstinspires.ftc.teamcode.trajectorysequence;
import com.acmerobotics.roadrunner.geometry.Pose2d;
import org.firstinspires.ftc.teamcode.trajectorysequence.sequencesegment.SequenceSegment;
import java.util.Collections;
import java.util.List;
public class TrajectorySequence {
private final List<SequenceSegment> sequenceList;
public TrajectorySequence(List<SequenceSegment> sequenceList) {
if (sequenceList.size() == 0) throw new EmptySequenceException();
this.sequenceList = Collections.unmodifiableList(sequenceList);
}
public Pose2d start() {
return sequenceList.get(0).getStartPose();
}
public Pose2d end() {
return sequenceList.get(sequenceList.size() - 1).getEndPose();
}
public double duration() {
double total = 0.0;
for (SequenceSegment segment : sequenceList) {
total += segment.getDuration();
}
return total;
}
public SequenceSegment get(int i) {
return sequenceList.get(i);
}
public int size() {
return sequenceList.size();
}
}

View File

@ -0,0 +1,710 @@
package org.firstinspires.ftc.teamcode.trajectorysequence;
import com.acmerobotics.roadrunner.geometry.Pose2d;
import com.acmerobotics.roadrunner.geometry.Vector2d;
import com.acmerobotics.roadrunner.path.PathContinuityViolationException;
import com.acmerobotics.roadrunner.profile.MotionProfile;
import com.acmerobotics.roadrunner.profile.MotionProfileGenerator;
import com.acmerobotics.roadrunner.profile.MotionState;
import com.acmerobotics.roadrunner.trajectory.DisplacementMarker;
import com.acmerobotics.roadrunner.trajectory.DisplacementProducer;
import com.acmerobotics.roadrunner.trajectory.MarkerCallback;
import com.acmerobotics.roadrunner.trajectory.SpatialMarker;
import com.acmerobotics.roadrunner.trajectory.TemporalMarker;
import com.acmerobotics.roadrunner.trajectory.TimeProducer;
import com.acmerobotics.roadrunner.trajectory.Trajectory;
import com.acmerobotics.roadrunner.trajectory.TrajectoryBuilder;
import com.acmerobotics.roadrunner.trajectory.TrajectoryMarker;
import com.acmerobotics.roadrunner.trajectory.constraints.TrajectoryAccelerationConstraint;
import com.acmerobotics.roadrunner.trajectory.constraints.TrajectoryVelocityConstraint;
import com.acmerobotics.roadrunner.util.Angle;
import org.firstinspires.ftc.teamcode.trajectorysequence.sequencesegment.SequenceSegment;
import org.firstinspires.ftc.teamcode.trajectorysequence.sequencesegment.TrajectorySegment;
import org.firstinspires.ftc.teamcode.trajectorysequence.sequencesegment.TurnSegment;
import org.firstinspires.ftc.teamcode.trajectorysequence.sequencesegment.WaitSegment;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
public class TrajectorySequenceBuilder {
private final double resolution = 0.25;
private final TrajectoryVelocityConstraint baseVelConstraint;
private final TrajectoryAccelerationConstraint baseAccelConstraint;
private TrajectoryVelocityConstraint currentVelConstraint;
private TrajectoryAccelerationConstraint currentAccelConstraint;
private final double baseTurnConstraintMaxAngVel;
private final double baseTurnConstraintMaxAngAccel;
private double currentTurnConstraintMaxAngVel;
private double currentTurnConstraintMaxAngAccel;
private final List<SequenceSegment> sequenceSegments;
private final List<TemporalMarker> temporalMarkers;
private final List<DisplacementMarker> displacementMarkers;
private final List<SpatialMarker> spatialMarkers;
private Pose2d lastPose;
private double tangentOffset;
private boolean setAbsoluteTangent;
private double absoluteTangent;
private TrajectoryBuilder currentTrajectoryBuilder;
private double currentDuration;
private double currentDisplacement;
private double lastDurationTraj;
private double lastDisplacementTraj;
public TrajectorySequenceBuilder(
Pose2d startPose,
Double startTangent,
TrajectoryVelocityConstraint baseVelConstraint,
TrajectoryAccelerationConstraint baseAccelConstraint,
double baseTurnConstraintMaxAngVel,
double baseTurnConstraintMaxAngAccel
) {
this.baseVelConstraint = baseVelConstraint;
this.baseAccelConstraint = baseAccelConstraint;
this.currentVelConstraint = baseVelConstraint;
this.currentAccelConstraint = baseAccelConstraint;
this.baseTurnConstraintMaxAngVel = baseTurnConstraintMaxAngVel;
this.baseTurnConstraintMaxAngAccel = baseTurnConstraintMaxAngAccel;
this.currentTurnConstraintMaxAngVel = baseTurnConstraintMaxAngVel;
this.currentTurnConstraintMaxAngAccel = baseTurnConstraintMaxAngAccel;
sequenceSegments = new ArrayList<>();
temporalMarkers = new ArrayList<>();
displacementMarkers = new ArrayList<>();
spatialMarkers = new ArrayList<>();
lastPose = startPose;
tangentOffset = 0.0;
setAbsoluteTangent = (startTangent != null);
absoluteTangent = startTangent != null ? startTangent : 0.0;
currentTrajectoryBuilder = null;
currentDuration = 0.0;
currentDisplacement = 0.0;
lastDurationTraj = 0.0;
lastDisplacementTraj = 0.0;
}
public TrajectorySequenceBuilder(
Pose2d startPose,
TrajectoryVelocityConstraint baseVelConstraint,
TrajectoryAccelerationConstraint baseAccelConstraint,
double baseTurnConstraintMaxAngVel,
double baseTurnConstraintMaxAngAccel
) {
this(
startPose, null,
baseVelConstraint, baseAccelConstraint,
baseTurnConstraintMaxAngVel, baseTurnConstraintMaxAngAccel
);
}
public TrajectorySequenceBuilder lineTo(Vector2d endPosition) {
return addPath(() -> currentTrajectoryBuilder.lineTo(endPosition, currentVelConstraint, currentAccelConstraint));
}
public TrajectorySequenceBuilder lineTo(
Vector2d endPosition,
TrajectoryVelocityConstraint velConstraint,
TrajectoryAccelerationConstraint accelConstraint
) {
return addPath(() -> currentTrajectoryBuilder.lineTo(endPosition, velConstraint, accelConstraint));
}
public TrajectorySequenceBuilder lineToConstantHeading(Vector2d endPosition) {
return addPath(() -> currentTrajectoryBuilder.lineToConstantHeading(endPosition, currentVelConstraint, currentAccelConstraint));
}
public TrajectorySequenceBuilder lineToConstantHeading(
Vector2d endPosition,
TrajectoryVelocityConstraint velConstraint,
TrajectoryAccelerationConstraint accelConstraint
) {
return addPath(() -> currentTrajectoryBuilder.lineToConstantHeading(endPosition, velConstraint, accelConstraint));
}
public TrajectorySequenceBuilder lineToLinearHeading(Pose2d endPose) {
return addPath(() -> currentTrajectoryBuilder.lineToLinearHeading(endPose, currentVelConstraint, currentAccelConstraint));
}
public TrajectorySequenceBuilder lineToLinearHeading(
Pose2d endPose,
TrajectoryVelocityConstraint velConstraint,
TrajectoryAccelerationConstraint accelConstraint
) {
return addPath(() -> currentTrajectoryBuilder.lineToLinearHeading(endPose, velConstraint, accelConstraint));
}
public TrajectorySequenceBuilder lineToSplineHeading(Pose2d endPose) {
return addPath(() -> currentTrajectoryBuilder.lineToSplineHeading(endPose, currentVelConstraint, currentAccelConstraint));
}
public TrajectorySequenceBuilder lineToSplineHeading(
Pose2d endPose,
TrajectoryVelocityConstraint velConstraint,
TrajectoryAccelerationConstraint accelConstraint
) {
return addPath(() -> currentTrajectoryBuilder.lineToSplineHeading(endPose, velConstraint, accelConstraint));
}
public TrajectorySequenceBuilder strafeTo(Vector2d endPosition) {
return addPath(() -> currentTrajectoryBuilder.strafeTo(endPosition, currentVelConstraint, currentAccelConstraint));
}
public TrajectorySequenceBuilder strafeTo(
Vector2d endPosition,
TrajectoryVelocityConstraint velConstraint,
TrajectoryAccelerationConstraint accelConstraint
) {
return addPath(() -> currentTrajectoryBuilder.strafeTo(endPosition, velConstraint, accelConstraint));
}
public TrajectorySequenceBuilder forward(double distance) {
return addPath(() -> currentTrajectoryBuilder.forward(distance, currentVelConstraint, currentAccelConstraint));
}
public TrajectorySequenceBuilder forward(
double distance,
TrajectoryVelocityConstraint velConstraint,
TrajectoryAccelerationConstraint accelConstraint
) {
return addPath(() -> currentTrajectoryBuilder.forward(distance, velConstraint, accelConstraint));
}
public TrajectorySequenceBuilder back(double distance) {
return addPath(() -> currentTrajectoryBuilder.back(distance, currentVelConstraint, currentAccelConstraint));
}
public TrajectorySequenceBuilder back(
double distance,
TrajectoryVelocityConstraint velConstraint,
TrajectoryAccelerationConstraint accelConstraint
) {
return addPath(() -> currentTrajectoryBuilder.back(distance, velConstraint, accelConstraint));
}
public TrajectorySequenceBuilder strafeLeft(double distance) {
return addPath(() -> currentTrajectoryBuilder.strafeLeft(distance, currentVelConstraint, currentAccelConstraint));
}
public TrajectorySequenceBuilder strafeLeft(
double distance,
TrajectoryVelocityConstraint velConstraint,
TrajectoryAccelerationConstraint accelConstraint
) {
return addPath(() -> currentTrajectoryBuilder.strafeLeft(distance, velConstraint, accelConstraint));
}
public TrajectorySequenceBuilder strafeRight(double distance) {
return addPath(() -> currentTrajectoryBuilder.strafeRight(distance, currentVelConstraint, currentAccelConstraint));
}
public TrajectorySequenceBuilder strafeRight(
double distance,
TrajectoryVelocityConstraint velConstraint,
TrajectoryAccelerationConstraint accelConstraint
) {
return addPath(() -> currentTrajectoryBuilder.strafeRight(distance, velConstraint, accelConstraint));
}
public TrajectorySequenceBuilder splineTo(Vector2d endPosition, double endHeading) {
return addPath(() -> currentTrajectoryBuilder.splineTo(endPosition, endHeading, currentVelConstraint, currentAccelConstraint));
}
public TrajectorySequenceBuilder splineTo(
Vector2d endPosition,
double endHeading,
TrajectoryVelocityConstraint velConstraint,
TrajectoryAccelerationConstraint accelConstraint
) {
return addPath(() -> currentTrajectoryBuilder.splineTo(endPosition, endHeading, velConstraint, accelConstraint));
}
public TrajectorySequenceBuilder splineToConstantHeading(Vector2d endPosition, double endHeading) {
return addPath(() -> currentTrajectoryBuilder.splineToConstantHeading(endPosition, endHeading, currentVelConstraint, currentAccelConstraint));
}
public TrajectorySequenceBuilder splineToConstantHeading(
Vector2d endPosition,
double endHeading,
TrajectoryVelocityConstraint velConstraint,
TrajectoryAccelerationConstraint accelConstraint
) {
return addPath(() -> currentTrajectoryBuilder.splineToConstantHeading(endPosition, endHeading, velConstraint, accelConstraint));
}
public TrajectorySequenceBuilder splineToLinearHeading(Pose2d endPose, double endHeading) {
return addPath(() -> currentTrajectoryBuilder.splineToLinearHeading(endPose, endHeading, currentVelConstraint, currentAccelConstraint));
}
public TrajectorySequenceBuilder splineToLinearHeading(
Pose2d endPose,
double endHeading,
TrajectoryVelocityConstraint velConstraint,
TrajectoryAccelerationConstraint accelConstraint
) {
return addPath(() -> currentTrajectoryBuilder.splineToLinearHeading(endPose, endHeading, velConstraint, accelConstraint));
}
public TrajectorySequenceBuilder splineToSplineHeading(Pose2d endPose, double endHeading) {
return addPath(() -> currentTrajectoryBuilder.splineToSplineHeading(endPose, endHeading, currentVelConstraint, currentAccelConstraint));
}
public TrajectorySequenceBuilder splineToSplineHeading(
Pose2d endPose,
double endHeading,
TrajectoryVelocityConstraint velConstraint,
TrajectoryAccelerationConstraint accelConstraint
) {
return addPath(() -> currentTrajectoryBuilder.splineToSplineHeading(endPose, endHeading, velConstraint, accelConstraint));
}
private TrajectorySequenceBuilder addPath(AddPathCallback callback) {
if (currentTrajectoryBuilder == null) newPath();
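// If appending this segment would violate path continuity (e.g. a direction
// reversal), push the current trajectory as its own segment and retry the
// path call on a fresh builder.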
try {
callback.run();
} catch (PathContinuityViolationException e) {
newPath();
callback.run();
}
Trajectory builtTraj = currentTrajectoryBuilder.build();
double durationDifference = builtTraj.duration() - lastDurationTraj;
double displacementDifference = builtTraj.getPath().length() - lastDisplacementTraj;
lastPose = builtTraj.end();
currentDuration += durationDifference;
currentDisplacement += displacementDifference;
lastDurationTraj = builtTraj.duration();
lastDisplacementTraj = builtTraj.getPath().length();
return this;
}
public TrajectorySequenceBuilder setTangent(double tangent) {
setAbsoluteTangent = true;
absoluteTangent = tangent;
pushPath();
return this;
}
private TrajectorySequenceBuilder setTangentOffset(double offset) {
setAbsoluteTangent = false;
this.tangentOffset = offset;
this.pushPath();
return this;
}
public TrajectorySequenceBuilder setReversed(boolean reversed) {
return reversed ? this.setTangentOffset(Math.toRadians(180.0)) : this.setTangentOffset(0.0);
}
public TrajectorySequenceBuilder setConstraints(
TrajectoryVelocityConstraint velConstraint,
TrajectoryAccelerationConstraint accelConstraint
) {
this.currentVelConstraint = velConstraint;
this.currentAccelConstraint = accelConstraint;
return this;
}
public TrajectorySequenceBuilder resetConstraints() {
this.currentVelConstraint = this.baseVelConstraint;
this.currentAccelConstraint = this.baseAccelConstraint;
return this;
}
public TrajectorySequenceBuilder setVelConstraint(TrajectoryVelocityConstraint velConstraint) {
this.currentVelConstraint = velConstraint;
return this;
}
public TrajectorySequenceBuilder resetVelConstraint() {
this.currentVelConstraint = this.baseVelConstraint;
return this;
}
public TrajectorySequenceBuilder setAccelConstraint(TrajectoryAccelerationConstraint accelConstraint) {
this.currentAccelConstraint = accelConstraint;
return this;
}
public TrajectorySequenceBuilder resetAccelConstraint() {
this.currentAccelConstraint = this.baseAccelConstraint;
return this;
}
public TrajectorySequenceBuilder setTurnConstraint(double maxAngVel, double maxAngAccel) {
this.currentTurnConstraintMaxAngVel = maxAngVel;
this.currentTurnConstraintMaxAngAccel = maxAngAccel;
return this;
}
public TrajectorySequenceBuilder resetTurnConstraint() {
this.currentTurnConstraintMaxAngVel = baseTurnConstraintMaxAngVel;
this.currentTurnConstraintMaxAngAccel = baseTurnConstraintMaxAngAccel;
return this;
}
public TrajectorySequenceBuilder addTemporalMarker(MarkerCallback callback) {
return this.addTemporalMarker(currentDuration, callback);
}
public TrajectorySequenceBuilder UNSTABLE_addTemporalMarkerOffset(double offset, MarkerCallback callback) {
return this.addTemporalMarker(currentDuration + offset, callback);
}
public TrajectorySequenceBuilder addTemporalMarker(double time, MarkerCallback callback) {
return this.addTemporalMarker(0.0, time, callback);
}
public TrajectorySequenceBuilder addTemporalMarker(double scale, double offset, MarkerCallback callback) {
return this.addTemporalMarker(time -> scale * time + offset, callback);
}
public TrajectorySequenceBuilder addTemporalMarker(TimeProducer time, MarkerCallback callback) {
this.temporalMarkers.add(new TemporalMarker(time, callback));
return this;
}
public TrajectorySequenceBuilder addSpatialMarker(Vector2d point, MarkerCallback callback) {
this.spatialMarkers.add(new SpatialMarker(point, callback));
return this;
}
public TrajectorySequenceBuilder addDisplacementMarker(MarkerCallback callback) {
return this.addDisplacementMarker(currentDisplacement, callback);
}
public TrajectorySequenceBuilder UNSTABLE_addDisplacementMarkerOffset(double offset, MarkerCallback callback) {
return this.addDisplacementMarker(currentDisplacement + offset, callback);
}
public TrajectorySequenceBuilder addDisplacementMarker(double displacement, MarkerCallback callback) {
return this.addDisplacementMarker(0.0, displacement, callback);
}
public TrajectorySequenceBuilder addDisplacementMarker(double scale, double offset, MarkerCallback callback) {
return addDisplacementMarker((displacement -> scale * displacement + offset), callback);
}
public TrajectorySequenceBuilder addDisplacementMarker(DisplacementProducer displacement, MarkerCallback callback) {
displacementMarkers.add(new DisplacementMarker(displacement, callback));
return this;
}
public TrajectorySequenceBuilder turn(double angle) {
return turn(angle, currentTurnConstraintMaxAngVel, currentTurnConstraintMaxAngAccel);
}
public TrajectorySequenceBuilder turn(double angle, double maxAngVel, double maxAngAccel) {
pushPath();
MotionProfile turnProfile = MotionProfileGenerator.generateSimpleMotionProfile(
new MotionState(lastPose.getHeading(), 0.0, 0.0, 0.0),
new MotionState(lastPose.getHeading() + angle, 0.0, 0.0, 0.0),
maxAngVel,
maxAngAccel
);
sequenceSegments.add(new TurnSegment(lastPose, angle, turnProfile, Collections.emptyList()));
lastPose = new Pose2d(
lastPose.getX(), lastPose.getY(),
Angle.norm(lastPose.getHeading() + angle)
);
currentDuration += turnProfile.duration();
return this;
}
public TrajectorySequenceBuilder waitSeconds(double seconds) {
pushPath();
sequenceSegments.add(new WaitSegment(lastPose, seconds, Collections.emptyList()));
currentDuration += seconds;
return this;
}
public TrajectorySequenceBuilder addTrajectory(Trajectory trajectory) {
pushPath();
sequenceSegments.add(new TrajectorySegment(trajectory));
return this;
}
private void pushPath() {
if (currentTrajectoryBuilder != null) {
Trajectory builtTraj = currentTrajectoryBuilder.build();
sequenceSegments.add(new TrajectorySegment(builtTraj));
}
currentTrajectoryBuilder = null;
}
private void newPath() {
if (currentTrajectoryBuilder != null)
pushPath();
lastDurationTraj = 0.0;
lastDisplacementTraj = 0.0;
double tangent = setAbsoluteTangent ? absoluteTangent : Angle.norm(lastPose.getHeading() + tangentOffset);
currentTrajectoryBuilder = new TrajectoryBuilder(lastPose, tangent, currentVelConstraint, currentAccelConstraint, resolution);
}
public TrajectorySequence build() {
pushPath();
List<TrajectoryMarker> globalMarkers = convertMarkersToGlobal(
sequenceSegments,
temporalMarkers, displacementMarkers, spatialMarkers
);
return new TrajectorySequence(projectGlobalMarkersToLocalSegments(globalMarkers, sequenceSegments));
}
private List<TrajectoryMarker> convertMarkersToGlobal(
List<SequenceSegment> sequenceSegments,
List<TemporalMarker> temporalMarkers,
List<DisplacementMarker> displacementMarkers,
List<SpatialMarker> spatialMarkers
) {
ArrayList<TrajectoryMarker> trajectoryMarkers = new ArrayList<>();
// Convert temporal markers
for (TemporalMarker marker : temporalMarkers) {
trajectoryMarkers.add(
new TrajectoryMarker(marker.getProducer().produce(currentDuration), marker.getCallback())
);
}
// Convert displacement markers
for (DisplacementMarker marker : displacementMarkers) {
double time = displacementToTime(
sequenceSegments,
marker.getProducer().produce(currentDisplacement)
);
trajectoryMarkers.add(
new TrajectoryMarker(
time,
marker.getCallback()
)
);
}
// Convert spatial markers
for (SpatialMarker marker : spatialMarkers) {
trajectoryMarkers.add(
new TrajectoryMarker(
pointToTime(sequenceSegments, marker.getPoint()),
marker.getCallback()
)
);
}
return trajectoryMarkers;
}
private List<SequenceSegment> projectGlobalMarkersToLocalSegments(List<TrajectoryMarker> markers, List<SequenceSegment> sequenceSegments) {
if (sequenceSegments.isEmpty()) return Collections.emptyList();
markers.sort(Comparator.comparingDouble(TrajectoryMarker::getTime));
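// Walk the markers in time order, locate the segment each marker falls in, and
// rebuild that segment with the marker re-expressed as a local time offset.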
int segmentIndex = 0;
double currentTime = 0;
for (TrajectoryMarker marker : markers) {
SequenceSegment segment = null;
double markerTime = marker.getTime();
double segmentOffsetTime = 0;
while (segmentIndex < sequenceSegments.size()) {
SequenceSegment seg = sequenceSegments.get(segmentIndex);
if (currentTime + seg.getDuration() >= markerTime) {
segment = seg;
segmentOffsetTime = markerTime - currentTime;
break;
} else {
currentTime += seg.getDuration();
segmentIndex++;
}
}
if (segmentIndex >= sequenceSegments.size()) {
segment = sequenceSegments.get(sequenceSegments.size()-1);
segmentOffsetTime = segment.getDuration();
}
SequenceSegment newSegment = null;
if (segment instanceof WaitSegment) {
WaitSegment thisSegment = (WaitSegment) segment;
List<TrajectoryMarker> newMarkers = new ArrayList<>(thisSegment.getMarkers());
newMarkers.add(new TrajectoryMarker(segmentOffsetTime, marker.getCallback()));
newSegment = new WaitSegment(thisSegment.getStartPose(), thisSegment.getDuration(), newMarkers);
} else if (segment instanceof TurnSegment) {
TurnSegment thisSegment = (TurnSegment) segment;
List<TrajectoryMarker> newMarkers = new ArrayList<>(thisSegment.getMarkers());
newMarkers.add(new TrajectoryMarker(segmentOffsetTime, marker.getCallback()));
newSegment = new TurnSegment(thisSegment.getStartPose(), thisSegment.getTotalRotation(), thisSegment.getMotionProfile(), newMarkers);
} else if (segment instanceof TrajectorySegment) {
TrajectorySegment thisSegment = (TrajectorySegment) segment;
List<TrajectoryMarker> newMarkers = new ArrayList<>(thisSegment.getTrajectory().getMarkers());
newMarkers.add(new TrajectoryMarker(segmentOffsetTime, marker.getCallback()));
newSegment = new TrajectorySegment(new Trajectory(thisSegment.getTrajectory().getPath(), thisSegment.getTrajectory().getProfile(), newMarkers));
}
sequenceSegments.set(segmentIndex, newSegment);
}
return sequenceSegments;
}
// Taken from Road Runner's TrajectoryGenerator.displacementToTime() since it's private
// note: this assumes that the profile position is monotonic increasing
private Double motionProfileDisplacementToTime(MotionProfile profile, double s) {
double tLo = 0.0;
double tHi = profile.duration();
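// Bisect on time over [tLo, tHi], narrowing toward the time whose profile
// position equals s (valid because position increases monotonically).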
while (!(Math.abs(tLo - tHi) < 1e-6)) {
double tMid = 0.5 * (tLo + tHi);
if (profile.get(tMid).getX() > s) {
tHi = tMid;
} else {
tLo = tMid;
}
}
return 0.5 * (tLo + tHi);
}
private Double displacementToTime(List<SequenceSegment> sequenceSegments, double s) {
double currentTime = 0.0;
double currentDisplacement = 0.0;
for (SequenceSegment segment : sequenceSegments) {
if (segment instanceof TrajectorySegment) {
TrajectorySegment thisSegment = (TrajectorySegment) segment;
double segmentLength = thisSegment.getTrajectory().getPath().length();
if (currentDisplacement + segmentLength > s) {
double target = s - currentDisplacement;
double timeInSegment = motionProfileDisplacementToTime(
thisSegment.getTrajectory().getProfile(),
target
);
return currentTime + timeInSegment;
} else {
currentDisplacement += segmentLength;
currentTime += thisSegment.getTrajectory().duration();
}
} else {
currentTime += segment.getDuration();
}
}
return 0.0;
}
private Double pointToTime(List<SequenceSegment> sequenceSegments, Vector2d point) {
class ComparingPoints {
private final double distanceToPoint;
private final double totalDisplacement;
private final double thisPathDisplacement;
public ComparingPoints(double distanceToPoint, double totalDisplacement, double thisPathDisplacement) {
this.distanceToPoint = distanceToPoint;
this.totalDisplacement = totalDisplacement;
this.thisPathDisplacement = thisPathDisplacement;
}
}
List<ComparingPoints> projectedPoints = new ArrayList<>();
for (SequenceSegment segment : sequenceSegments) {
if (segment instanceof TrajectorySegment) {
TrajectorySegment thisSegment = (TrajectorySegment) segment;
double displacement = thisSegment.getTrajectory().getPath().project(point, 0.25);
Vector2d projectedPoint = thisSegment.getTrajectory().getPath().get(displacement).vec();
double distanceToPoint = point.minus(projectedPoint).norm();
double totalDisplacement = 0.0;
for (ComparingPoints comparingPoint : projectedPoints) {
totalDisplacement += comparingPoint.totalDisplacement;
}
totalDisplacement += displacement;
projectedPoints.add(new ComparingPoints(distanceToPoint, displacement, totalDisplacement));
}
}
ComparingPoints closestPoint = null;
for (ComparingPoints comparingPoint : projectedPoints) {
if (closestPoint == null) {
closestPoint = comparingPoint;
continue;
}
if (comparingPoint.distanceToPoint < closestPoint.distanceToPoint)
closestPoint = comparingPoint;
}
return displacementToTime(sequenceSegments, closestPoint.thisPathDisplacement);
}
private interface AddPathCallback {
void run();
}
}
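// Usage sketch (illustrative; follows the Road Runner quickstart convention of a
// drive class exposing trajectorySequenceBuilder(Pose2d) and
// followTrajectorySequence(TrajectorySequence)):
//
//   TrajectorySequence seq = drive.trajectorySequenceBuilder(new Pose2d())
//           .forward(24)
//           .turn(Math.toRadians(90))
//           .waitSeconds(1.0)
//           .back(12)
//           .build();
//   drive.followTrajectorySequence(seq);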

View File

@ -0,0 +1,306 @@
package org.firstinspires.ftc.teamcode.trajectorysequence;
import androidx.annotation.Nullable;
import com.acmerobotics.dashboard.FtcDashboard;
import com.acmerobotics.dashboard.canvas.Canvas;
import com.acmerobotics.dashboard.config.Config;
import com.acmerobotics.dashboard.telemetry.TelemetryPacket;
import com.acmerobotics.roadrunner.control.PIDCoefficients;
import com.acmerobotics.roadrunner.control.PIDFController;
import com.acmerobotics.roadrunner.drive.DriveSignal;
import com.acmerobotics.roadrunner.followers.TrajectoryFollower;
import com.acmerobotics.roadrunner.geometry.Pose2d;
import com.acmerobotics.roadrunner.profile.MotionState;
import com.acmerobotics.roadrunner.trajectory.Trajectory;
import com.acmerobotics.roadrunner.trajectory.TrajectoryMarker;
import com.acmerobotics.roadrunner.util.NanoClock;
import com.qualcomm.robotcore.hardware.VoltageSensor;
import org.firstinspires.ftc.teamcode.drive.DriveConstants;
import org.firstinspires.ftc.teamcode.trajectorysequence.sequencesegment.SequenceSegment;
import org.firstinspires.ftc.teamcode.trajectorysequence.sequencesegment.TrajectorySegment;
import org.firstinspires.ftc.teamcode.trajectorysequence.sequencesegment.TurnSegment;
import org.firstinspires.ftc.teamcode.trajectorysequence.sequencesegment.WaitSegment;
import org.firstinspires.ftc.teamcode.util.DashboardUtil;
import org.firstinspires.ftc.teamcode.util.LogFiles;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
@Config
public class TrajectorySequenceRunner {
public static String COLOR_INACTIVE_TRAJECTORY = "#4caf507a";
public static String COLOR_INACTIVE_TURN = "#7c4dff7a";
public static String COLOR_INACTIVE_WAIT = "#dd2c007a";
public static String COLOR_ACTIVE_TRAJECTORY = "#4CAF50";
public static String COLOR_ACTIVE_TURN = "#7c4dff";
public static String COLOR_ACTIVE_WAIT = "#dd2c00";
public static int POSE_HISTORY_LIMIT = 100;
private final TrajectoryFollower follower;
private final PIDFController turnController;
private final NanoClock clock;
private TrajectorySequence currentTrajectorySequence;
private double currentSegmentStartTime;
private int currentSegmentIndex;
private int lastSegmentIndex;
private Pose2d lastPoseError = new Pose2d();
List<TrajectoryMarker> remainingMarkers = new ArrayList<>();
private final FtcDashboard dashboard;
private final LinkedList<Pose2d> poseHistory = new LinkedList<>();
private VoltageSensor voltageSensor;
private List<Integer> lastDriveEncPositions, lastDriveEncVels, lastTrackingEncPositions, lastTrackingEncVels;
public TrajectorySequenceRunner(
TrajectoryFollower follower, PIDCoefficients headingPIDCoefficients, VoltageSensor voltageSensor,
List<Integer> lastDriveEncPositions, List<Integer> lastDriveEncVels, List<Integer> lastTrackingEncPositions, List<Integer> lastTrackingEncVels
) {
this.follower = follower;
turnController = new PIDFController(headingPIDCoefficients);
turnController.setInputBounds(0, 2 * Math.PI);
this.voltageSensor = voltageSensor;
this.lastDriveEncPositions = lastDriveEncPositions;
this.lastDriveEncVels = lastDriveEncVels;
this.lastTrackingEncPositions = lastTrackingEncPositions;
this.lastTrackingEncVels = lastTrackingEncVels;
clock = NanoClock.system();
dashboard = FtcDashboard.getInstance();
dashboard.setTelemetryTransmissionInterval(25);
}
public void followTrajectorySequenceAsync(TrajectorySequence trajectorySequence) {
currentTrajectorySequence = trajectorySequence;
currentSegmentStartTime = clock.seconds();
currentSegmentIndex = 0;
lastSegmentIndex = -1;
}
public @Nullable
DriveSignal update(Pose2d poseEstimate, Pose2d poseVelocity) {
Pose2d targetPose = null;
DriveSignal driveSignal = null;
TelemetryPacket packet = new TelemetryPacket();
Canvas fieldOverlay = packet.fieldOverlay();
SequenceSegment currentSegment = null;
if (currentTrajectorySequence != null) {
if (currentSegmentIndex >= currentTrajectorySequence.size()) {
for (TrajectoryMarker marker : remainingMarkers) {
marker.getCallback().onMarkerReached();
}
remainingMarkers.clear();
currentTrajectorySequence = null;
}
if (currentTrajectorySequence == null)
return new DriveSignal();
double now = clock.seconds();
boolean isNewTransition = currentSegmentIndex != lastSegmentIndex;
currentSegment = currentTrajectorySequence.get(currentSegmentIndex);
if (isNewTransition) {
currentSegmentStartTime = now;
lastSegmentIndex = currentSegmentIndex;
for (TrajectoryMarker marker : remainingMarkers) {
marker.getCallback().onMarkerReached();
}
remainingMarkers.clear();
remainingMarkers.addAll(currentSegment.getMarkers());
Collections.sort(remainingMarkers, (t1, t2) -> Double.compare(t1.getTime(), t2.getTime()));
}
double deltaTime = now - currentSegmentStartTime;
if (currentSegment instanceof TrajectorySegment) {
Trajectory currentTrajectory = ((TrajectorySegment) currentSegment).getTrajectory();
if (isNewTransition)
follower.followTrajectory(currentTrajectory);
if (!follower.isFollowing()) {
currentSegmentIndex++;
driveSignal = new DriveSignal();
} else {
driveSignal = follower.update(poseEstimate, poseVelocity);
lastPoseError = follower.getLastError();
}
targetPose = currentTrajectory.get(deltaTime);
} else if (currentSegment instanceof TurnSegment) {
MotionState targetState = ((TurnSegment) currentSegment).getMotionProfile().get(deltaTime);
turnController.setTargetPosition(targetState.getX());
double correction = turnController.update(poseEstimate.getHeading());
double targetOmega = targetState.getV();
double targetAlpha = targetState.getA();
lastPoseError = new Pose2d(0, 0, turnController.getLastError());
Pose2d startPose = currentSegment.getStartPose();
targetPose = startPose.copy(startPose.getX(), startPose.getY(), targetState.getX());
driveSignal = new DriveSignal(
new Pose2d(0, 0, targetOmega + correction),
new Pose2d(0, 0, targetAlpha)
);
if (deltaTime >= currentSegment.getDuration()) {
currentSegmentIndex++;
driveSignal = new DriveSignal();
}
} else if (currentSegment instanceof WaitSegment) {
lastPoseError = new Pose2d();
targetPose = currentSegment.getStartPose();
driveSignal = new DriveSignal();
if (deltaTime >= currentSegment.getDuration()) {
currentSegmentIndex++;
}
}
while (remainingMarkers.size() > 0 && deltaTime > remainingMarkers.get(0).getTime()) {
remainingMarkers.get(0).getCallback().onMarkerReached();
remainingMarkers.remove(0);
}
}
poseHistory.add(poseEstimate);
if (POSE_HISTORY_LIMIT > -1 && poseHistory.size() > POSE_HISTORY_LIMIT) {
poseHistory.removeFirst();
}
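// Open-loop voltage compensation: scale the feedforward signal by
// (nominal / measured battery voltage) so commanded accelerations hold up as
// the battery sags. Skipped when RUN_USING_ENCODER's velocity PID regulates
// motor speed instead.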
final double NOMINAL_VOLTAGE = 12.0;
double voltage = voltageSensor.getVoltage();
if (driveSignal != null && !DriveConstants.RUN_USING_ENCODER) {
driveSignal = new DriveSignal(
driveSignal.getVel().times(NOMINAL_VOLTAGE / voltage),
driveSignal.getAccel().times(NOMINAL_VOLTAGE / voltage)
);
}
if (targetPose != null) {
LogFiles.record(
targetPose, poseEstimate, voltage,
lastDriveEncPositions, lastDriveEncVels, lastTrackingEncPositions, lastTrackingEncVels
);
}
packet.put("x", poseEstimate.getX());
packet.put("y", poseEstimate.getY());
packet.put("heading (deg)", Math.toDegrees(poseEstimate.getHeading()));
packet.put("xError", getLastPoseError().getX());
packet.put("yError", getLastPoseError().getY());
packet.put("headingError (deg)", Math.toDegrees(getLastPoseError().getHeading()));
draw(fieldOverlay, currentTrajectorySequence, currentSegment, targetPose, poseEstimate);
dashboard.sendTelemetryPacket(packet);
return driveSignal;
}
private void draw(
Canvas fieldOverlay,
TrajectorySequence sequence, SequenceSegment currentSegment,
Pose2d targetPose, Pose2d poseEstimate
) {
if (sequence != null) {
for (int i = 0; i < sequence.size(); i++) {
SequenceSegment segment = sequence.get(i);
if (segment instanceof TrajectorySegment) {
fieldOverlay.setStrokeWidth(1);
fieldOverlay.setStroke(COLOR_INACTIVE_TRAJECTORY);
DashboardUtil.drawSampledPath(fieldOverlay, ((TrajectorySegment) segment).getTrajectory().getPath());
} else if (segment instanceof TurnSegment) {
Pose2d pose = segment.getStartPose();
fieldOverlay.setFill(COLOR_INACTIVE_TURN);
fieldOverlay.fillCircle(pose.getX(), pose.getY(), 2);
} else if (segment instanceof WaitSegment) {
Pose2d pose = segment.getStartPose();
fieldOverlay.setStrokeWidth(1);
fieldOverlay.setStroke(COLOR_INACTIVE_WAIT);
fieldOverlay.strokeCircle(pose.getX(), pose.getY(), 3);
}
}
}
if (currentSegment != null) {
if (currentSegment instanceof TrajectorySegment) {
Trajectory currentTrajectory = ((TrajectorySegment) currentSegment).getTrajectory();
fieldOverlay.setStrokeWidth(1);
fieldOverlay.setStroke(COLOR_ACTIVE_TRAJECTORY);
DashboardUtil.drawSampledPath(fieldOverlay, currentTrajectory.getPath());
} else if (currentSegment instanceof TurnSegment) {
Pose2d pose = currentSegment.getStartPose();
fieldOverlay.setFill(COLOR_ACTIVE_TURN);
fieldOverlay.fillCircle(pose.getX(), pose.getY(), 3);
} else if (currentSegment instanceof WaitSegment) {
Pose2d pose = currentSegment.getStartPose();
fieldOverlay.setStrokeWidth(1);
fieldOverlay.setStroke(COLOR_ACTIVE_WAIT);
fieldOverlay.strokeCircle(pose.getX(), pose.getY(), 3);
}
}
if (targetPose != null) {
fieldOverlay.setStrokeWidth(1);
fieldOverlay.setStroke("#4CAF50");
DashboardUtil.drawRobot(fieldOverlay, targetPose);
}
fieldOverlay.setStroke("#3F51B5");
DashboardUtil.drawPoseHistory(fieldOverlay, poseHistory);
fieldOverlay.setStroke("#3F51B5");
DashboardUtil.drawRobot(fieldOverlay, poseEstimate);
}
public Pose2d getLastPoseError() {
return lastPoseError;
}
public boolean isBusy() {
return currentTrajectorySequence != null;
}
}

View File

@ -0,0 +1,40 @@
package org.firstinspires.ftc.teamcode.trajectorysequence.sequencesegment;
import com.acmerobotics.roadrunner.geometry.Pose2d;
import com.acmerobotics.roadrunner.trajectory.TrajectoryMarker;
import java.util.List;
public abstract class SequenceSegment {
private final double duration;
private final Pose2d startPose;
private final Pose2d endPose;
private final List<TrajectoryMarker> markers;
protected SequenceSegment(
double duration,
Pose2d startPose, Pose2d endPose,
List<TrajectoryMarker> markers
) {
this.duration = duration;
this.startPose = startPose;
this.endPose = endPose;
this.markers = markers;
}
public double getDuration() {
return this.duration;
}
public Pose2d getStartPose() {
return startPose;
}
public Pose2d getEndPose() {
return endPose;
}
public List<TrajectoryMarker> getMarkers() {
return markers;
}
}

View File

@ -0,0 +1,20 @@
package org.firstinspires.ftc.teamcode.trajectorysequence.sequencesegment;
import com.acmerobotics.roadrunner.trajectory.Trajectory;
import java.util.Collections;
public final class TrajectorySegment extends SequenceSegment {
private final Trajectory trajectory;
public TrajectorySegment(Trajectory trajectory) {
// Note: Markers are already stored in the `Trajectory` itself.
// This class should not hold any markers
super(trajectory.duration(), trajectory.start(), trajectory.end(), Collections.emptyList());
this.trajectory = trajectory;
}
public Trajectory getTrajectory() {
return this.trajectory;
}
}

View File

@ -0,0 +1,36 @@
package org.firstinspires.ftc.teamcode.trajectorysequence.sequencesegment;
import com.acmerobotics.roadrunner.geometry.Pose2d;
import com.acmerobotics.roadrunner.profile.MotionProfile;
import com.acmerobotics.roadrunner.trajectory.TrajectoryMarker;
import com.acmerobotics.roadrunner.util.Angle;
import java.util.List;
public final class TurnSegment extends SequenceSegment {
private final double totalRotation;
private final MotionProfile motionProfile;
public TurnSegment(Pose2d startPose, double totalRotation, MotionProfile motionProfile, List<TrajectoryMarker> markers) {
super(
motionProfile.duration(),
startPose,
new Pose2d(
startPose.getX(), startPose.getY(),
Angle.norm(startPose.getHeading() + totalRotation)
),
markers
);
this.totalRotation = totalRotation;
this.motionProfile = motionProfile;
}
public final double getTotalRotation() {
return this.totalRotation;
}
public final MotionProfile getMotionProfile() {
return this.motionProfile;
}
}

View File

@ -0,0 +1,12 @@
package org.firstinspires.ftc.teamcode.trajectorysequence.sequencesegment;
import com.acmerobotics.roadrunner.geometry.Pose2d;
import com.acmerobotics.roadrunner.trajectory.TrajectoryMarker;
import java.util.List;
public final class WaitSegment extends SequenceSegment {
public WaitSegment(Pose2d pose, double seconds, List<TrajectoryMarker> markers) {
super(seconds, pose, pose, markers);
}
}

View File

@ -0,0 +1,70 @@
package org.firstinspires.ftc.teamcode.util;
import androidx.annotation.Nullable;
import com.acmerobotics.roadrunner.trajectory.Trajectory;
import com.acmerobotics.roadrunner.trajectory.TrajectoryBuilder;
import com.acmerobotics.roadrunner.trajectory.config.TrajectoryConfig;
import com.acmerobotics.roadrunner.trajectory.config.TrajectoryConfigManager;
import com.acmerobotics.roadrunner.trajectory.config.TrajectoryGroupConfig;
import org.firstinspires.ftc.robotcore.internal.system.AppUtil;
import java.io.IOException;
import java.io.InputStream;
/**
* Set of utilities for loading trajectories from assets (the plugin save location).
*/
public class AssetsTrajectoryManager {
/**
* Loads the group config.
*/
public static @Nullable
TrajectoryGroupConfig loadGroupConfig() {
try {
InputStream inputStream = AppUtil.getDefContext().getAssets().open(
"trajectory/" + TrajectoryConfigManager.GROUP_FILENAME);
return TrajectoryConfigManager.loadGroupConfig(inputStream);
} catch (IOException e) {
return null;
}
}
/**
* Loads a trajectory config with the given name.
*/
public static @Nullable TrajectoryConfig loadConfig(String name) {
try {
InputStream inputStream = AppUtil.getDefContext().getAssets().open(
"trajectory/" + name + ".yaml");
return TrajectoryConfigManager.loadConfig(inputStream);
} catch (IOException e) {
return null;
}
}
/**
* Loads a trajectory builder with the given name.
*/
public static @Nullable TrajectoryBuilder loadBuilder(String name) {
TrajectoryGroupConfig groupConfig = loadGroupConfig();
TrajectoryConfig config = loadConfig(name);
if (groupConfig == null || config == null) {
return null;
}
return config.toTrajectoryBuilder(groupConfig);
}
/**
* Loads a trajectory with the given name.
*/
public static @Nullable Trajectory load(String name) {
TrajectoryBuilder builder = loadBuilder(name);
if (builder == null) {
return null;
}
return builder.build();
}
}
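// Usage sketch (illustrative; assumes a trajectory config saved to the app's
// assets as trajectory/myTrajectory.yaml by the path editor plugin):
//
//   Trajectory traj = AssetsTrajectoryManager.load("myTrajectory");
//   if (traj != null) drive.followTrajectory(traj);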

View File

@ -0,0 +1,45 @@
package org.firstinspires.ftc.teamcode.util;
/**
* IMU axes signs in the order XYZ (after remapping).
*/
public enum AxesSigns {
PPP(0b000),
PPN(0b001),
PNP(0b010),
PNN(0b011),
NPP(0b100),
NPN(0b101),
NNP(0b110),
NNN(0b111);
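// Each bit encodes one axis sign in X, Y, Z order (X is the most significant
// bit): 0 = positive (P), 1 = negative (N).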
public final int bVal;
AxesSigns(int bVal) {
this.bVal = bVal;
}
public static AxesSigns fromBinaryValue(int bVal) {
int maskedVal = bVal & 0x07;
switch (maskedVal) {
case 0b000:
return AxesSigns.PPP;
case 0b001:
return AxesSigns.PPN;
case 0b010:
return AxesSigns.PNP;
case 0b011:
return AxesSigns.PNN;
case 0b100:
return AxesSigns.NPP;
case 0b101:
return AxesSigns.NPN;
case 0b110:
return AxesSigns.NNP;
case 0b111:
return AxesSigns.NNN;
default:
throw new IllegalStateException("Unexpected value for maskedVal: " + maskedVal);
}
}
}

View File

@ -0,0 +1,8 @@
package org.firstinspires.ftc.teamcode.util;
/**
* A direction for an axis to be remapped to
*/
public enum AxisDirection {
POS_X, NEG_X, POS_Y, NEG_Y, POS_Z, NEG_Z
}

View File

@ -0,0 +1,54 @@
package org.firstinspires.ftc.teamcode.util;
import com.acmerobotics.dashboard.canvas.Canvas;
import com.acmerobotics.roadrunner.geometry.Pose2d;
import com.acmerobotics.roadrunner.geometry.Vector2d;
import com.acmerobotics.roadrunner.path.Path;
import java.util.List;
/**
* Set of helper functions for drawing Road Runner paths and trajectories on dashboard canvases.
*/
public class DashboardUtil {
private static final double DEFAULT_RESOLUTION = 2.0; // distance units; presumed inches
private static final double ROBOT_RADIUS = 9; // in
public static void drawPoseHistory(Canvas canvas, List<Pose2d> poseHistory) {
double[] xPoints = new double[poseHistory.size()];
double[] yPoints = new double[poseHistory.size()];
for (int i = 0; i < poseHistory.size(); i++) {
Pose2d pose = poseHistory.get(i);
xPoints[i] = pose.getX();
yPoints[i] = pose.getY();
}
canvas.strokePolyline(xPoints, yPoints);
}
public static void drawSampledPath(Canvas canvas, Path path, double resolution) {
int samples = (int) Math.ceil(path.length() / resolution);
double[] xPoints = new double[samples];
double[] yPoints = new double[samples];
double dx = path.length() / (samples - 1);
for (int i = 0; i < samples; i++) {
double displacement = i * dx;
Pose2d pose = path.get(displacement);
xPoints[i] = pose.getX();
yPoints[i] = pose.getY();
}
canvas.strokePolyline(xPoints, yPoints);
}
public static void drawSampledPath(Canvas canvas, Path path) {
drawSampledPath(canvas, path, DEFAULT_RESOLUTION);
}
public static void drawRobot(Canvas canvas, Pose2d pose) {
canvas.strokeCircle(pose.getX(), pose.getY(), ROBOT_RADIUS);
Vector2d v = pose.headingVec().times(ROBOT_RADIUS);
double x1 = pose.getX() + v.getX() / 2, y1 = pose.getY() + v.getY() / 2;
double x2 = pose.getX() + v.getX(), y2 = pose.getY() + v.getY();
canvas.strokeLine(x1, y1, x2, y2);
}
}

125
TeamCode/util/Encoder.java Normal file
View File

@ -0,0 +1,125 @@
package org.firstinspires.ftc.teamcode.util;
import com.acmerobotics.roadrunner.util.NanoClock;
import com.qualcomm.robotcore.hardware.DcMotorEx;
import com.qualcomm.robotcore.hardware.DcMotorSimple;
/**
* Wraps a motor instance to provide corrected velocity counts and allow reversing independently of the corresponding
* slot's motor direction
*/
public class Encoder {
private final static int CPS_STEP = 0x10000;
private static double inverseOverflow(double input, double estimate) {
// convert to uint16
int real = (int) input & 0xffff;
// initial, modulo-based correction: it can recover the remainder of 5 of the upper 16 bits
// because the velocity is always a multiple of 20 cps due to Expansion Hub's 50ms measurement window
real += ((real % 20) / 4) * CPS_STEP;
// estimate-based correction: it finds the nearest multiple of 5 to correct the upper bits by
real += Math.round((estimate - real) / (5 * CPS_STEP)) * 5 * CPS_STEP;
return real;
}
public enum Direction {
FORWARD(1),
REVERSE(-1);
private int multiplier;
Direction(int multiplier) {
this.multiplier = multiplier;
}
public int getMultiplier() {
return multiplier;
}
}
private DcMotorEx motor;
private NanoClock clock;
private Direction direction;
private int lastPosition;
private int velocityEstimateIdx;
private double[] velocityEstimates;
private double lastUpdateTime;
public Encoder(DcMotorEx motor, NanoClock clock) {
this.motor = motor;
this.clock = clock;
this.direction = Direction.FORWARD;
this.lastPosition = 0;
this.velocityEstimates = new double[3];
this.lastUpdateTime = clock.seconds();
}
public Encoder(DcMotorEx motor) {
this(motor, NanoClock.system());
}
public Direction getDirection() {
return direction;
}
private int getMultiplier() {
return getDirection().getMultiplier() * (motor.getDirection() == DcMotorSimple.Direction.FORWARD ? 1 : -1);
}
/**
* Allows you to set the direction of the counts and velocity without modifying the motor's direction state
* @param direction either reverse or forward depending on if encoder counts should be negated
*/
public void setDirection(Direction direction) {
this.direction = direction;
}
/**
* Gets the position from the underlying motor and adjusts for the set direction.
* Additionally, this method updates the velocity estimates used for compensated velocity
*
* @return encoder position
*/
public int getCurrentPosition() {
int multiplier = getMultiplier();
int currentPosition = motor.getCurrentPosition() * multiplier;
if (currentPosition != lastPosition) {
double currentTime = clock.seconds();
double dt = currentTime - lastUpdateTime;
velocityEstimates[velocityEstimateIdx] = (currentPosition - lastPosition) / dt;
velocityEstimateIdx = (velocityEstimateIdx + 1) % 3;
lastPosition = currentPosition;
lastUpdateTime = currentTime;
}
return currentPosition;
}
/**
* Gets the velocity directly from the underlying motor and compensates for the direction
* See {@link #getCorrectedVelocity} for high (>2^15) counts per second velocities (such as on REV Through Bore)
*
* @return raw velocity
*/
public double getRawVelocity() {
int multiplier = getMultiplier();
return motor.getVelocity() * multiplier;
}
/**
* Uses velocity estimates gathered in {@link #getCurrentPosition} to estimate the upper bits of velocity
* that are lost in overflow due to velocity being transmitted as 16 bits.
* CAVEAT: must regularly call {@link #getCurrentPosition} for the compensation to work correctly.
*
* @return corrected velocity
*/
public double getCorrectedVelocity() {
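// Median-of-three filter over the recent velocity estimates rejects a single
// outlier sample before it seeds the overflow correction.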
double median = velocityEstimates[0] > velocityEstimates[1]
? Math.max(velocityEstimates[1], Math.min(velocityEstimates[0], velocityEstimates[2]))
: Math.max(velocityEstimates[0], Math.min(velocityEstimates[1], velocityEstimates[2]));
return inverseOverflow(getRawVelocity(), median);
}
}
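// Usage sketch (illustrative; mirrors how the quickstart's
// StandardTrackingWheelLocalizer wraps its dead-wheel encoder ports):
//
//   Encoder leftEncoder = new Encoder(hardwareMap.get(DcMotorEx.class, "leftEncoder"));
//   leftEncoder.setDirection(Encoder.Direction.REVERSE);
//   int position = leftEncoder.getCurrentPosition();   // also refreshes velocity estimates
//   double velocity = leftEncoder.getCorrectedVelocity();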

273
TeamCode/util/LogFiles.java Normal file
View File

@ -0,0 +1,273 @@
package org.firstinspires.ftc.teamcode.util;
import android.annotation.SuppressLint;
import android.content.Context;
import com.acmerobotics.roadrunner.geometry.Pose2d;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import com.qualcomm.hardware.rev.RevHubOrientationOnRobot;
import com.qualcomm.robotcore.eventloop.opmode.OpMode;
import com.qualcomm.robotcore.eventloop.opmode.OpModeManagerImpl;
import com.qualcomm.robotcore.eventloop.opmode.OpModeManagerNotifier;
import com.qualcomm.robotcore.util.RobotLog;
import com.qualcomm.robotcore.util.WebHandlerManager;
import org.firstinspires.ftc.ftccommon.external.WebHandlerRegistrar;
import org.firstinspires.ftc.robotcore.internal.system.AppUtil;
import org.firstinspires.ftc.teamcode.drive.DriveConstants;
import org.firstinspires.ftc.teamcode.drive.SampleMecanumDrive;
import org.firstinspires.ftc.teamcode.drive.SampleTankDrive;
import org.firstinspires.ftc.teamcode.drive.StandardTrackingWheelLocalizer;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.Objects;
import fi.iki.elonen.NanoHTTPD;
public final class LogFiles {
private static final File ROOT =
new File(AppUtil.ROOT_FOLDER + "/RoadRunner/logs/");
public static LogFile log = new LogFile("uninitialized");
public static class LogFile {
public String version = "quickstart1 v2";
public String opModeName;
public long msInit = System.currentTimeMillis();
public long nsInit = System.nanoTime();
public long nsStart, nsStop;
public double ticksPerRev = DriveConstants.TICKS_PER_REV;
public double maxRpm = DriveConstants.MAX_RPM;
public boolean runUsingEncoder = DriveConstants.RUN_USING_ENCODER;
public double motorP = DriveConstants.MOTOR_VELO_PID.p;
public double motorI = DriveConstants.MOTOR_VELO_PID.i;
public double motorD = DriveConstants.MOTOR_VELO_PID.d;
public double motorF = DriveConstants.MOTOR_VELO_PID.f;
public double wheelRadius = DriveConstants.WHEEL_RADIUS;
public double gearRatio = DriveConstants.GEAR_RATIO;
public double trackWidth = DriveConstants.TRACK_WIDTH;
public double kV = DriveConstants.kV;
public double kA = DriveConstants.kA;
public double kStatic = DriveConstants.kStatic;
public double maxVel = DriveConstants.MAX_VEL;
public double maxAccel = DriveConstants.MAX_ACCEL;
public double maxAngVel = DriveConstants.MAX_ANG_VEL;
public double maxAngAccel = DriveConstants.MAX_ANG_ACCEL;
public double mecTransP = SampleMecanumDrive.TRANSLATIONAL_PID.kP;
public double mecTransI = SampleMecanumDrive.TRANSLATIONAL_PID.kI;
public double mecTransD = SampleMecanumDrive.TRANSLATIONAL_PID.kD;
public double mecHeadingP = SampleMecanumDrive.HEADING_PID.kP;
public double mecHeadingI = SampleMecanumDrive.HEADING_PID.kI;
public double mecHeadingD = SampleMecanumDrive.HEADING_PID.kD;
public double mecLateralMultiplier = SampleMecanumDrive.LATERAL_MULTIPLIER;
public double tankAxialP = SampleTankDrive.AXIAL_PID.kP;
public double tankAxialI = SampleTankDrive.AXIAL_PID.kI;
public double tankAxialD = SampleTankDrive.AXIAL_PID.kD;
public double tankCrossTrackP = SampleTankDrive.CROSS_TRACK_PID.kP;
public double tankCrossTrackI = SampleTankDrive.CROSS_TRACK_PID.kI;
public double tankCrossTrackD = SampleTankDrive.CROSS_TRACK_PID.kD;
public double tankHeadingP = SampleTankDrive.HEADING_PID.kP;
public double tankHeadingI = SampleTankDrive.HEADING_PID.kI;
public double tankHeadingD = SampleTankDrive.HEADING_PID.kD;
public double trackingTicksPerRev = StandardTrackingWheelLocalizer.TICKS_PER_REV;
public double trackingWheelRadius = StandardTrackingWheelLocalizer.WHEEL_RADIUS;
public double trackingGearRatio = StandardTrackingWheelLocalizer.GEAR_RATIO;
public double trackingLateralDistance = StandardTrackingWheelLocalizer.LATERAL_DISTANCE;
public double trackingForwardOffset = StandardTrackingWheelLocalizer.FORWARD_OFFSET;
public RevHubOrientationOnRobot.LogoFacingDirection LOGO_FACING_DIR = DriveConstants.LOGO_FACING_DIR;
public RevHubOrientationOnRobot.UsbFacingDirection USB_FACING_DIR = DriveConstants.USB_FACING_DIR;
public List<Long> nsTimes = new ArrayList<>();
public List<Double> targetXs = new ArrayList<>();
public List<Double> targetYs = new ArrayList<>();
public List<Double> targetHeadings = new ArrayList<>();
public List<Double> xs = new ArrayList<>();
public List<Double> ys = new ArrayList<>();
public List<Double> headings = new ArrayList<>();
public List<Double> voltages = new ArrayList<>();
public List<List<Integer>> driveEncPositions = new ArrayList<>();
public List<List<Integer>> driveEncVels = new ArrayList<>();
public List<List<Integer>> trackingEncPositions = new ArrayList<>();
public List<List<Integer>> trackingEncVels = new ArrayList<>();
public LogFile(String opModeName) {
this.opModeName = opModeName;
}
}
public static void record(
Pose2d targetPose, Pose2d pose, double voltage,
List<Integer> lastDriveEncPositions, List<Integer> lastDriveEncVels, List<Integer> lastTrackingEncPositions, List<Integer> lastTrackingEncVels
) {
long nsTime = System.nanoTime();
if (nsTime - log.nsStart > 3 * 60 * 1_000_000_000L) {
return;
}
log.nsTimes.add(nsTime);
log.targetXs.add(targetPose.getX());
log.targetYs.add(targetPose.getY());
log.targetHeadings.add(targetPose.getHeading());
log.xs.add(pose.getX());
log.ys.add(pose.getY());
log.headings.add(pose.getHeading());
log.voltages.add(voltage);
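// lazily grow the per-encoder lists to match however many encoders are reported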
while (log.driveEncPositions.size() < lastDriveEncPositions.size()) {
log.driveEncPositions.add(new ArrayList<>());
}
while (log.driveEncVels.size() < lastDriveEncVels.size()) {
log.driveEncVels.add(new ArrayList<>());
}
while (log.trackingEncPositions.size() < lastTrackingEncPositions.size()) {
log.trackingEncPositions.add(new ArrayList<>());
}
while (log.trackingEncVels.size() < lastTrackingEncVels.size()) {
log.trackingEncVels.add(new ArrayList<>());
}
for (int i = 0; i < lastDriveEncPositions.size(); i++) {
log.driveEncPositions.get(i).add(lastDriveEncPositions.get(i));
}
for (int i = 0; i < lastDriveEncVels.size(); i++) {
log.driveEncVels.get(i).add(lastDriveEncVels.get(i));
}
for (int i = 0; i < lastTrackingEncPositions.size(); i++) {
log.trackingEncPositions.get(i).add(lastTrackingEncPositions.get(i));
}
for (int i = 0; i < lastTrackingEncVels.size(); i++) {
log.trackingEncVels.get(i).add(lastTrackingEncVels.get(i));
}
}
private static final OpModeManagerNotifier.Notifications notifHandler = new OpModeManagerNotifier.Notifications() {
@SuppressLint("SimpleDateFormat")
final DateFormat dateFormat = new SimpleDateFormat("yyyy_MM_dd__HH_mm_ss_SSS");
final ObjectWriter jsonWriter = new ObjectMapper(new JsonFactory())
.writerWithDefaultPrettyPrinter();
@Override
public void onOpModePreInit(OpMode opMode) {
log = new LogFile(opMode.getClass().getCanonicalName());
// clean up old files
File[] fs = Objects.requireNonNull(ROOT.listFiles());
Arrays.sort(fs, (a, b) -> Long.compare(a.lastModified(), b.lastModified()));
long totalSizeBytes = 0;
for (File f : fs) {
totalSizeBytes += f.length();
}
int i = 0;
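// delete oldest files first until the log folder is back under the 32 MB cap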
while (i < fs.length && totalSizeBytes >= 32 * 1000 * 1000) {
totalSizeBytes -= fs[i].length();
if (!fs[i].delete()) {
RobotLog.setGlobalErrorMsg("Unable to delete file " + fs[i].getAbsolutePath());
}
++i;
}
}
@Override
public void onOpModePreStart(OpMode opMode) {
log.nsStart = System.nanoTime();
}
@Override
public void onOpModePostStop(OpMode opMode) {
log.nsStop = System.nanoTime();
if (!(opMode instanceof OpModeManagerImpl.DefaultOpMode)) {
//noinspection ResultOfMethodCallIgnored
ROOT.mkdirs();
String filename = dateFormat.format(new Date(log.msInit)) + "__" + opMode.getClass().getSimpleName() + ".json";
File file = new File(ROOT, filename);
try {
jsonWriter.writeValue(file, log);
} catch (IOException e) {
RobotLog.setGlobalErrorMsg(new RuntimeException(e),
"Unable to write data to " + file.getAbsolutePath());
}
}
}
};
@WebHandlerRegistrar
public static void registerRoutes(Context context, WebHandlerManager manager) {
//noinspection ResultOfMethodCallIgnored
ROOT.mkdirs();
// op mode manager only stores a weak reference, so we need to keep notifHandler alive ourselves
// don't use @OnCreateEventLoop because it's unreliable
OpModeManagerImpl.getOpModeManagerOfActivity(
AppUtil.getInstance().getActivity()
).registerListener(notifHandler);
manager.register("/logs", session -> {
final StringBuilder sb = new StringBuilder();
sb.append("<!doctype html><html><head><title>Logs</title></head><body><ul>");
File[] fs = Objects.requireNonNull(ROOT.listFiles());
Arrays.sort(fs, (a, b) -> Long.compare(b.lastModified(), a.lastModified()));
for (File f : fs) {
sb.append("<li><a href=\"/logs/download?file=");
sb.append(f.getName());
sb.append("\" download=\"");
sb.append(f.getName());
sb.append("\">");
sb.append(f.getName());
sb.append("</a></li>");
}
sb.append("</ul></body></html>");
return NanoHTTPD.newFixedLengthResponse(NanoHTTPD.Response.Status.OK,
NanoHTTPD.MIME_HTML, sb.toString());
});
manager.register("/logs/download", session -> {
final String[] pairs = session.getQueryParameterString().split("&");
if (pairs.length != 1) {
return NanoHTTPD.newFixedLengthResponse(NanoHTTPD.Response.Status.BAD_REQUEST,
NanoHTTPD.MIME_PLAINTEXT, "expected one query parameter, got " + pairs.length);
}
final String[] parts = pairs[0].split("=");
if (!parts[0].equals("file")) {
return NanoHTTPD.newFixedLengthResponse(NanoHTTPD.Response.Status.BAD_REQUEST,
NanoHTTPD.MIME_PLAINTEXT, "expected file query parameter, got " + parts[0]);
}
File f = new File(ROOT, parts[1]);
if (!f.exists()) {
return NanoHTTPD.newFixedLengthResponse(NanoHTTPD.Response.Status.NOT_FOUND,
NanoHTTPD.MIME_PLAINTEXT, "file " + f + " doesn't exist");
}
return NanoHTTPD.newChunkedResponse(NanoHTTPD.Response.Status.OK,
"application/json", new FileInputStream(f));
});
}
}
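
For orientation, a minimal sketch (not part of the diff) of how a drive class might feed this logger once per control loop. The enclosing class name LogFiles, the sketch class, and its field names are assumptions, not taken from the source:

import com.acmerobotics.roadrunner.geometry.Pose2d;
import com.qualcomm.robotcore.hardware.VoltageSensor;
import java.util.ArrayList;
import java.util.List;

// Hypothetical drive-class fragment; the localizer would refresh the encoder
// lists elsewhere on every loop iteration.
class DriveLoggingSketch {
    List<Integer> lastDriveEncPositions = new ArrayList<>();
    List<Integer> lastDriveEncVels = new ArrayList<>();
    List<Integer> lastTrackingEncPositions = new ArrayList<>();
    List<Integer> lastTrackingEncVels = new ArrayList<>();

    void updateLog(Pose2d targetPose, Pose2d currentPose, VoltageSensor battery) {
        LogFiles.record(targetPose, currentPose, battery.getVoltage(),
                lastDriveEncPositions, lastDriveEncVels,
                lastTrackingEncPositions, lastTrackingEncVels);
    }
}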

View File

@ -0,0 +1,60 @@
package org.firstinspires.ftc.teamcode.util;
import org.firstinspires.ftc.robotcore.internal.system.AppUtil;
import java.io.File;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
* Utility functions for log files.
*/
public class LoggingUtil {
public static final File ROAD_RUNNER_FOLDER =
new File(AppUtil.ROOT_FOLDER + "/RoadRunner/");
private static final long LOG_QUOTA = 25 * 1024 * 1024; // 25MB log quota for now
private static void buildLogList(List<File> logFiles, File dir) {
for (File file : dir.listFiles()) {
if (file.isDirectory()) {
buildLogList(logFiles, file);
} else {
logFiles.add(file);
}
}
}
private static void pruneLogsIfNecessary() {
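// walk the folder recursively, then delete the oldest files until under LOG_QUOTA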
List<File> logFiles = new ArrayList<>();
buildLogList(logFiles, ROAD_RUNNER_FOLDER);
Collections.sort(logFiles, (lhs, rhs) ->
Long.compare(lhs.lastModified(), rhs.lastModified()));
long dirSize = 0;
for (File file: logFiles) {
dirSize += file.length();
}
while (dirSize > LOG_QUOTA && !logFiles.isEmpty()) {
File fileToRemove = logFiles.remove(0);
dirSize -= fileToRemove.length();
//noinspection ResultOfMethodCallIgnored
fileToRemove.delete();
}
}
/**
* Obtain a log file with the provided name, pruning old logs if the quota is exceeded.
*/
public static File getLogFile(String name) {
//noinspection ResultOfMethodCallIgnored
ROAD_RUNNER_FOLDER.mkdirs();
pruneLogsIfNecessary();
return new File(ROAD_RUNNER_FOLDER, name);
}
}
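
Usage is a single call; a minimal fragment, with an illustrative file name:

// Fetch a fresh, quota-pruned log file before a tuning run.
File rampLog = LoggingUtil.getLogFile("DriveRampRegression-" + System.currentTimeMillis() + ".csv");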

View File

@ -0,0 +1,124 @@
package org.firstinspires.ftc.teamcode.util;
import com.qualcomm.hardware.lynx.LynxModule;
import com.qualcomm.robotcore.hardware.HardwareMap;
import org.firstinspires.ftc.robotcore.internal.system.Misc;
import java.util.HashMap;
import java.util.Map;
/**
* Collection of utilities for interacting with Lynx modules.
*/
public class LynxModuleUtil {
private static final LynxFirmwareVersion MIN_VERSION = new LynxFirmwareVersion(1, 8, 2);
/**
* Parsed representation of a Lynx module firmware version.
*/
public static class LynxFirmwareVersion implements Comparable<LynxFirmwareVersion> {
public final int major;
public final int minor;
public final int eng;
public LynxFirmwareVersion(int major, int minor, int eng) {
this.major = major;
this.minor = minor;
this.eng = eng;
}
@Override
public boolean equals(Object other) {
if (other instanceof LynxFirmwareVersion) {
LynxFirmwareVersion otherVersion = (LynxFirmwareVersion) other;
return major == otherVersion.major && minor == otherVersion.minor &&
eng == otherVersion.eng;
} else {
return false;
}
}
@Override
public int compareTo(LynxFirmwareVersion other) {
int majorComp = Integer.compare(major, other.major);
if (majorComp == 0) {
int minorComp = Integer.compare(minor, other.minor);
if (minorComp == 0) {
return Integer.compare(eng, other.eng);
} else {
return minorComp;
}
} else {
return majorComp;
}
}
@Override
public String toString() {
return Misc.formatInvariant("%d.%d.%d", major, minor, eng);
}
}
/**
* Retrieve and parse Lynx module firmware version.
* @param module Lynx module
* @return parsed firmware version
*/
public static LynxFirmwareVersion getFirmwareVersion(LynxModule module) {
String versionString = module.getNullableFirmwareVersionString();
if (versionString == null) {
return null;
}
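// version string is assumed to look like "HW: 20, Maj: 1, Min: 8, Eng: 2",
// so tokens 3, 5, and 7 hold the major, minor, and eng numbers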
String[] parts = versionString.split("[ :,]+");
try {
// note: for now, we ignore the hardware entry
return new LynxFirmwareVersion(
Integer.parseInt(parts[3]),
Integer.parseInt(parts[5]),
Integer.parseInt(parts[7])
);
} catch (NumberFormatException e) {
return null;
}
}
/**
* Exception indicating an outdated Lynx firmware version.
*/
public static class LynxFirmwareVersionException extends RuntimeException {
public LynxFirmwareVersionException(String detailMessage) {
super(detailMessage);
}
}
/**
* Ensure all of the Lynx modules attached to the robot satisfy the minimum firmware version requirement.
* @param hardwareMap hardware map containing Lynx modules
*/
public static void ensureMinimumFirmwareVersion(HardwareMap hardwareMap) {
Map<String, LynxFirmwareVersion> outdatedModules = new HashMap<>();
for (LynxModule module : hardwareMap.getAll(LynxModule.class)) {
LynxFirmwareVersion version = getFirmwareVersion(module);
if (version == null || version.compareTo(MIN_VERSION) < 0) {
for (String name : hardwareMap.getNamesOf(module)) {
outdatedModules.put(name, version);
}
}
}
if (!outdatedModules.isEmpty()) {
StringBuilder msgBuilder = new StringBuilder();
msgBuilder.append("One or more of the attached Lynx modules has outdated firmware\n");
msgBuilder.append(Misc.formatInvariant("Mandatory minimum firmware version for Road Runner: %s\n",
MIN_VERSION.toString()));
for (Map.Entry<String, LynxFirmwareVersion> entry : outdatedModules.entrySet()) {
msgBuilder.append(Misc.formatInvariant(
"\t%s: %s\n", entry.getKey(),
entry.getValue() == null ? "Unknown" : entry.getValue().toString()));
}
throw new LynxFirmwareVersionException(msgBuilder.toString());
}
}
}
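
A minimal usage sketch, assuming it runs during drive initialization with the op mode's hardwareMap:

// Throws LynxFirmwareVersionException with a per-module report if any hub is outdated.
LynxModuleUtil.ensureMinimumFirmwareVersion(hardwareMap);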

View File

@ -0,0 +1,156 @@
package org.firstinspires.ftc.teamcode.util;
import androidx.annotation.Nullable;
import com.acmerobotics.roadrunner.kinematics.Kinematics;
import org.apache.commons.math3.stat.regression.SimpleRegression;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.List;
/**
* Various regression utilities.
*/
public class RegressionUtil {
/**
* Feedforward parameter estimates from the ramp regression and additional summary statistics
*/
public static class RampResult {
public final double kV, kStatic, rSquare;
public RampResult(double kV, double kStatic, double rSquare) {
this.kV = kV;
this.kStatic = kStatic;
this.rSquare = rSquare;
}
}
/**
* Feedforward parameter estimate from the acceleration regression and additional summary statistics
*/
public static class AccelResult {
public final double kA, rSquare;
public AccelResult(double kA, double rSquare) {
this.kA = kA;
this.rSquare = rSquare;
}
}
/**
* Numerically compute dy/dx from the given x and y values. The returned list is padded to match
* the length of the original sequences.
*
* @param x x-values
* @param y y-values
* @return derivative values
*/
private static List<Double> numericalDerivative(List<Double> x, List<Double> y) {
List<Double> deriv = new ArrayList<>(x.size());
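// central difference: dy/dx ~ (y[i+1] - y[i-1]) / (x[i+1] - x[i-1])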
for (int i = 1; i < x.size() - 1; i++) {
deriv.add(
(y.get(i + 1) - y.get(i - 1)) /
(x.get(i + 1) - x.get(i - 1))
);
}
// copy endpoints to pad output
deriv.add(0, deriv.get(0));
deriv.add(deriv.get(deriv.size() - 1));
return deriv;
}
/**
* Run regression to compute velocity and static feedforward from ramp test data.
*
* Here's the general procedure for gathering the requisite data:
* 1. Slowly ramp the motor power/voltage and record encoder values along the way.
* 2. Run a linear regression of motor power against encoder velocity to obtain a slope
* (kV) and an optional intercept (kStatic).
*
* @param timeSamples time samples
* @param positionSamples position samples
* @param powerSamples power samples
* @param fitStatic fit kStatic
* @param file log file
*/
public static RampResult fitRampData(List<Double> timeSamples, List<Double> positionSamples,
List<Double> powerSamples, boolean fitStatic,
@Nullable File file) {
if (file != null) {
try (PrintWriter pw = new PrintWriter(file)) {
pw.println("time,position,power");
for (int i = 0; i < timeSamples.size(); i++) {
double time = timeSamples.get(i);
double pos = positionSamples.get(i);
double power = powerSamples.get(i);
pw.println(time + "," + pos + "," + power);
}
} catch (FileNotFoundException e) {
// ignore
}
}
List<Double> velSamples = numericalDerivative(timeSamples, positionSamples);
SimpleRegression rampReg = new SimpleRegression(fitStatic);
for (int i = 0; i < timeSamples.size(); i++) {
double vel = velSamples.get(i);
double power = powerSamples.get(i);
rampReg.addData(vel, power);
}
return new RampResult(Math.abs(rampReg.getSlope()), Math.abs(rampReg.getIntercept()),
rampReg.getRSquare());
}
/**
* Run regression to compute acceleration feedforward.
*
* @param timeSamples time samples
* @param positionSamples position samples
* @param powerSamples power samples
* @param rampResult ramp result
* @param file log file
*/
public static AccelResult fitAccelData(List<Double> timeSamples, List<Double> positionSamples,
List<Double> powerSamples, RampResult rampResult,
@Nullable File file) {
if (file != null) {
try (PrintWriter pw = new PrintWriter(file)) {
pw.println("time,position,power");
for (int i = 0; i < timeSamples.size(); i++) {
double time = timeSamples.get(i);
double pos = positionSamples.get(i);
double power = powerSamples.get(i);
pw.println(time + "," + pos + "," + power);
}
} catch (FileNotFoundException e) {
// ignore
}
}
List<Double> velSamples = numericalDerivative(timeSamples, positionSamples);
List<Double> accelSamples = numericalDerivative(timeSamples, velSamples);
SimpleRegression accelReg = new SimpleRegression(false);
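// regress residual power (measured minus velocity feedforward) against acceleration; slope = kA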
for (int i = 0; i < timeSamples.size(); i++) {
double vel = velSamples.get(i);
double accel = accelSamples.get(i);
double power = powerSamples.get(i);
double powerFromVel = Kinematics.calculateMotorFeedforward(
vel, 0.0, rampResult.kV, 0.0, rampResult.kStatic);
double powerFromAccel = power - powerFromVel;
accelReg.addData(accel, powerFromAccel);
}
return new AccelResult(Math.abs(accelReg.getSlope()), accelReg.getRSquare());
}
}
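
A sketch of the two-pass fit as a tuning op mode might drive it; the helper method, its parameters, and the log file names are illustrative assumptions, not part of the diff:

// Hypothetical helper: the op mode gathers one sample set during a slow power
// ramp and another during a constant/max-power acceleration run.
static void fitFeedforward(
        List<Double> rampTimes, List<Double> rampPositions, List<Double> rampPowers,
        List<Double> accelTimes, List<Double> accelPositions, List<Double> accelPowers) {
    // ramp pass: slope = kV, intercept = kStatic
    RegressionUtil.RampResult ramp = RegressionUtil.fitRampData(
            rampTimes, rampPositions, rampPowers, true,
            LoggingUtil.getLogFile("DriveRampRegression.csv"));
    // accel pass: slope of residual power vs. acceleration = kA
    RegressionUtil.AccelResult accel = RegressionUtil.fitAccelData(
            accelTimes, accelPositions, accelPowers, ramp,
            LoggingUtil.getLogFile("DriveAccelRegression.csv"));
    // ramp.kV, ramp.kStatic, and accel.kA then seed the DriveConstants values
}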

View File

@ -54,7 +54,7 @@ android {
defaultConfig {
signingConfig signingConfigs.debug
applicationId 'com.qualcomm.ftcrobotcontroller'
minSdkVersion 23
minSdkVersion 24
//noinspection ExpiredTargetSdkVersion
targetSdkVersion 28
@ -122,8 +122,5 @@ android {
}
repositories {
flatDir {
dirs rootProject.file('libs')
}
}

View File

@ -1,22 +1,23 @@
repositories {
mavenCentral()
google() // Needed for androidx
flatDir {
dirs rootProject.file('libs')
}
maven { url = 'https://maven.brott.dev/' }
}
dependencies {
implementation 'org.firstinspires.ftc:Inspection:8.0.0'
implementation 'org.firstinspires.ftc:Blocks:8.0.0'
implementation 'org.firstinspires.ftc:Tfod:8.0.0'
implementation 'org.firstinspires.ftc:RobotCore:8.0.0'
implementation 'org.firstinspires.ftc:RobotServer:8.0.0'
implementation 'org.firstinspires.ftc:OnBotJava:8.0.0'
implementation 'org.firstinspires.ftc:Hardware:8.0.0'
implementation 'org.firstinspires.ftc:FtcCommon:8.0.0'
implementation 'org.tensorflow:tensorflow-lite-task-vision:0.2.0'
implementation 'org.firstinspires.ftc:Inspection:8.2.0'
implementation 'org.firstinspires.ftc:Blocks:8.2.0'
implementation 'org.firstinspires.ftc:Tfod:8.2.0'
implementation 'org.firstinspires.ftc:RobotCore:8.2.0'
implementation 'org.firstinspires.ftc:RobotServer:8.2.0'
implementation 'org.firstinspires.ftc:OnBotJava:8.2.0'
implementation 'org.firstinspires.ftc:Hardware:8.2.0'
implementation 'org.firstinspires.ftc:FtcCommon:8.2.0'
implementation 'org.tensorflow:tensorflow-lite-task-vision:0.4.3'
runtimeOnly 'org.tensorflow:tensorflow-lite:2.12.0'
implementation 'androidx.appcompat:appcompat:1.2.0'
implementation 'org.firstinspires.ftc:gameAssets-PowerPlay:1.0.0'
implementation 'org.firstinspires.ftc:Vision:8.2.0'
implementation 'com.acmerobotics.dashboard:dashboard:0.4.5'
}

View File

@ -10,7 +10,8 @@ buildscript {
google()
}
dependencies {
classpath 'com.android.tools.build:gradle:7.2.0'
// Note for FTC Teams: Do not modify this yourself.
classpath 'com.android.tools.build:gradle:8.1.0'
}
}
@ -25,8 +26,4 @@ allprojects {
repositories {
mavenCentral()
flatDir {
dirs '../libs'
}
}

View File

@ -5,3 +5,9 @@ android.useAndroidX=true
# Automatically convert third-party libraries to use AndroidX
android.enableJetifier=true
# Allow Gradle to use up to 1 GB of RAM
org.gradle.jvmargs=-Xmx1024M
android.defaults.buildfeatures.buildconfig=true
android.nonTransitiveRClass=false
android.nonFinalResIds=false

View File

@ -1,5 +1,5 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-7.4.2-bin.zip
distributionUrl=https\://services.gradle.org/distributions/gradle-8.0-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists