Compare commits

8 commits:

660a2f63bc
5c8b4c448b
0879b4797f
3383440328
501c7c0e55
35d4aa7192
2390680ce6
e0282fcbd3
.github/CONTRIBUTING.md (vendored): 8 changes
@@ -18,7 +18,7 @@ If what you've read so far makes little sense, there are some very good git lear
 [Git Book](https://git-scm.com/book/en/v2)
 [Interactive Git Tutorial](https://try.github.io)

-##### Guidlines for experienced GIT users.
+### Guidlines for experienced GIT users.

 If you are absolutely certain that you want to push the big green button above, read on. Otherwise back _slowly away from keyboard_.

@@ -38,9 +38,9 @@ This section guides you through filing a bug report. The better the report the

 #### Before submitting a bug report

-- Check the [forums](http://ftcforum.usfirst.org/forum.php) to see if someone else has run into the problem and whether there is an official solution that doesn't require a new SDK.
+- Check the [forums](http://ftcforum.firstinspires.org/forum.php) to see if someone else has run into the problem and whether there is an official solution that doesn't require a new SDK.

-- Perform a search of current [issues](https://github.com/ftctechnh/ftc_app/issues) to see if the problem has already been reported. If so, add a comment to the existing issue instead of creating a new one.
+- Perform a search of current [issues](https://github.com/FIRST-Tech-Challenge/FtcRobotController/issues) to see if the problem has already been reported. If so, add a comment to the existing issue instead of creating a new one.

 #### How Do I Submit A (Good) Bug Report?

@@ -65,4 +65,4 @@ FIRST volunteers are awesome. You all have great ideas and we want to hear them

 Enhancements should be broadly applicable to a large majority of teams, should not force teams to change their workflow, and should provide real value to the mission of FIRST as it relates to engaging youth in engineering activities.

-The best way to get momentum behind new features is to post a description of your idea in the forums. Build community support for it. The FTC Technology Team monitors the forums. We'll hear you and if there's a large enough call for the feature it's very likely to get put on the list for a future release.
+The best way to get momentum behind new features is to post a description of your idea in the discussions section of this repository. Build community support for it. The FTC Technology Team monitors the discussions. We'll hear you and if there's a large enough call for the feature it's very likely to get put on the list for a future release.
.gitignore (vendored): 20 changes
@@ -9,8 +9,9 @@

 # Files for the ART/Dalvik VM
 *.dex

-# Java class files
+# Java/JDK files
 *.class
+*.hprof

 # Generated files
 bin/

@@ -40,17 +41,10 @@ captures/

 # IntelliJ
 *.iml
-.idea/workspace.xml
-.idea/tasks.xml
-.idea/gradle.xml
-.idea/assetWizardSettings.xml
-.idea/dictionaries
-.idea/libraries
-# Android Studio 3 in .gitignore file.
-.idea/caches
-.idea/modules.xml
-# Comment next line if keeping position of elements in Navigation Editor is relevant for you
-.idea/navEditor.xml
+.idea/

 # For Mac users
 .DS_Store

 # Keystore files
 # Uncomment the following lines if you do not want to check your keystore files in.

@@ -84,4 +78,4 @@ lint/intermediates/

 lint/generated/
 lint/outputs/
 lint/tmp/
-# lint/reports/
+# lint/reports/
build.gradle

@@ -8,7 +8,7 @@ apply plugin: 'com.android.library'

 android {

     defaultConfig {
-        minSdkVersion 23
+        minSdkVersion 24
         //noinspection ExpiredTargetSdkVersion
         targetSdkVersion 28
         buildConfigField "String", "APP_BUILD_TIME", '"' + (new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ", Locale.ROOT).format(new Date())) + '"'

@@ -17,8 +17,8 @@ android {

     compileSdkVersion 29

     compileOptions {
-        sourceCompatibility JavaVersion.VERSION_1_7
-        targetCompatibility JavaVersion.VERSION_1_7
+        sourceCompatibility JavaVersion.VERSION_1_8
+        targetCompatibility JavaVersion.VERSION_1_8
     }
     namespace = 'com.qualcomm.ftcrobotcontroller'
 }
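One practical effect of the Java 7 to Java 8 bump above is that code compiled in this module can now use lambdas and method references. A tiny hypothetical fragment, not part of this commit (it assumes it runs inside an OpMode that has a telemetry object; BuildConfig.APP_BUILD_TIME is the field generated by the buildConfigField line above):

    // Sketch only: Java 8 lambdas are now legal in this module.
    java.util.function.Supplier<String> buildTime = () -> BuildConfig.APP_BUILD_TIME;
    telemetry.addData("Built", buildTime.get());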
AndroidManifest.xml

@@ -1,15 +1,14 @@
 <?xml version="1.0" encoding="utf-8"?>
 <manifest xmlns:android="http://schemas.android.com/apk/res/android"
     xmlns:tools="http://schemas.android.com/tools"
-    android:versionCode="45"
-    android:versionName="7.2">
+    android:versionCode="50"
+    android:versionName="8.2">

     <uses-permission android:name="android.permission.RECEIVE_BOOT_COMPLETED" />

     <application
         android:allowBackup="true"
         android:largeHeap="true"
-        android:extractNativeLibs="true"
         android:icon="@drawable/ic_launcher"
         android:label="@string/app_name"
         android:theme="@style/AppThemeRedRC"
FtcRobotController/src/main/assets/qcar_config.xsd (new file): 0 changes shown
New file: ConceptAprilTag.java (package org.firstinspires.ftc.robotcontroller.external.samples)

@@ -0,0 +1,192 @@
/* Copyright (c) 2023 FIRST. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted (subject to the limitations in the disclaimer below) provided that
 * the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this list
 * of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice, this
 * list of conditions and the following disclaimer in the documentation and/or
 * other materials provided with the distribution.
 *
 * Neither the name of FIRST nor the names of its contributors may be used to endorse or
 * promote products derived from this software without specific prior written permission.
 *
 * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
 * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

package org.firstinspires.ftc.robotcontroller.external.samples;

import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import java.util.List;
import android.util.Size;
import org.firstinspires.ftc.robotcore.external.hardware.camera.BuiltinCameraDirection;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.robotcore.external.navigation.DistanceUnit;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.vision.VisionPortal;
import org.firstinspires.ftc.vision.apriltag.AprilTagDetection;
import org.firstinspires.ftc.vision.apriltag.AprilTagProcessor;
import org.firstinspires.ftc.vision.apriltag.AprilTagGameDatabase;

/**
 * This 2023-2024 OpMode illustrates the basics of AprilTag recognition and pose estimation,
 * including Java Builder structures for specifying Vision parameters.
 *
 * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
 * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
 */
@TeleOp(name = "Concept: AprilTag", group = "Concept")
@Disabled
public class ConceptAprilTag extends LinearOpMode {

    private static final boolean USE_WEBCAM = true;  // true for webcam, false for phone camera

    /**
     * {@link #aprilTag} is the variable to store our instance of the AprilTag processor.
     */
    private AprilTagProcessor aprilTag;

    /**
     * {@link #visionPortal} is the variable to store our instance of the vision portal.
     */
    private VisionPortal visionPortal;

    @Override
    public void runOpMode() {

        initAprilTag();

        // Wait for the DS start button to be touched.
        telemetry.addData("DS preview on/off", "3 dots, Camera Stream");
        telemetry.addData(">", "Touch Play to start OpMode");
        telemetry.update();
        waitForStart();

        if (opModeIsActive()) {
            while (opModeIsActive()) {

                telemetryAprilTag();

                // Push telemetry to the Driver Station.
                telemetry.update();

                // Save CPU resources; can resume streaming when needed.
                if (gamepad1.dpad_down) {
                    visionPortal.stopStreaming();
                } else if (gamepad1.dpad_up) {
                    visionPortal.resumeStreaming();
                }

                // Share the CPU.
                sleep(20);
            }
        }

        // Save more CPU resources when camera is no longer needed.
        visionPortal.close();

    }   // end method runOpMode()

    /**
     * Initialize the AprilTag processor.
     */
    private void initAprilTag() {

        // Create the AprilTag processor.
        aprilTag = new AprilTagProcessor.Builder()
            //.setDrawAxes(false)
            //.setDrawCubeProjection(false)
            //.setDrawTagOutline(true)
            //.setTagFamily(AprilTagProcessor.TagFamily.TAG_36h11)
            //.setTagLibrary(AprilTagGameDatabase.getCenterStageTagLibrary())
            //.setOutputUnits(DistanceUnit.INCH, AngleUnit.DEGREES)

            // == CAMERA CALIBRATION ==
            // If you do not manually specify calibration parameters, the SDK will attempt
            // to load a predefined calibration for your camera.
            //.setLensIntrinsics(578.272, 578.272, 402.145, 221.506)

            // ... these parameters are fx, fy, cx, cy.

            .build();

        // Create the vision portal by using a builder.
        VisionPortal.Builder builder = new VisionPortal.Builder();

        // Set the camera (webcam vs. built-in RC phone camera).
        if (USE_WEBCAM) {
            builder.setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"));
        } else {
            builder.setCamera(BuiltinCameraDirection.BACK);
        }

        // Choose a camera resolution. Not all cameras support all resolutions.
        //builder.setCameraResolution(new Size(640, 480));

        // Enable the RC preview (LiveView). Set "false" to omit camera monitoring.
        //builder.enableCameraMonitoring(true);

        // Set the stream format; MJPEG uses less bandwidth than default YUY2.
        //builder.setStreamFormat(VisionPortal.StreamFormat.YUY2);

        // Choose whether or not LiveView stops if no processors are enabled.
        // If set "true", monitor shows solid orange screen if no processors enabled.
        // If set "false", monitor shows camera view without annotations.
        //builder.setAutoStopLiveView(false);

        // Set and enable the processor.
        builder.addProcessor(aprilTag);

        // Build the Vision Portal, using the above settings.
        visionPortal = builder.build();

        // Disable or re-enable the aprilTag processor at any time.
        //visionPortal.setProcessorEnabled(aprilTag, true);

    }   // end method initAprilTag()


    /**
     * Function to add telemetry about AprilTag detections.
     */
    private void telemetryAprilTag() {

        List<AprilTagDetection> currentDetections = aprilTag.getDetections();
        telemetry.addData("# AprilTags Detected", currentDetections.size());

        // Step through the list of detections and display info for each one.
        for (AprilTagDetection detection : currentDetections) {
            if (detection.metadata != null) {
                telemetry.addLine(String.format("\n==== (ID %d) %s", detection.id, detection.metadata.name));
                telemetry.addLine(String.format("XYZ %6.1f %6.1f %6.1f (inch)", detection.ftcPose.x, detection.ftcPose.y, detection.ftcPose.z));
                telemetry.addLine(String.format("PRY %6.1f %6.1f %6.1f (deg)", detection.ftcPose.pitch, detection.ftcPose.roll, detection.ftcPose.yaw));
                telemetry.addLine(String.format("RBE %6.1f %6.1f %6.1f (inch, deg, deg)", detection.ftcPose.range, detection.ftcPose.bearing, detection.ftcPose.elevation));
            } else {
                telemetry.addLine(String.format("\n==== (ID %d) Unknown", detection.id));
                telemetry.addLine(String.format("Center %6.0f %6.0f (pixels)", detection.center.x, detection.center.y));
            }
        }   // end for() loop

        // Add "key" information to telemetry
        telemetry.addLine("\nkey:\nXYZ = X (Right), Y (Forward), Z (Up) dist.");
        telemetry.addLine("PRY = Pitch, Roll & Yaw (XYZ Rotation)");
        telemetry.addLine("RBE = Range, Bearing & Elevation");

    }   // end method telemetryAprilTag()

}   // end class
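A natural next step after displaying detections is closing the loop on one. The sketch below is hypothetical and not part of this commit: the leftDrive/rightDrive motors, the 12-inch standoff, and the 0.02/0.01 gains are all assumptions a team would supply and tune, and the sign conventions should be verified on the robot. It reuses Range.clip from com.qualcomm.robotcore.util.Range, which appears elsewhere in this changeset.

    // Hypothetical proportional-drive sketch for a two-motor tank drive.
    // Assumes fields: DcMotor leftDrive, rightDrive (mapped via hardwareMap).
    private void driveTowardTag() {
        List<AprilTagDetection> detections = aprilTag.getDetections();
        if (detections.isEmpty() || detections.get(0).metadata == null) {
            leftDrive.setPower(0);   // no usable tag: stop
            rightDrive.setPower(0);
            return;
        }
        AprilTagDetection tag = detections.get(0);
        double rangeError   = tag.ftcPose.range - 12.0;   // assumed 12-inch standoff
        double headingError = tag.ftcPose.bearing;        // degrees off camera axis
        double forward = Range.clip(rangeError * 0.02, -0.3, 0.3);    // assumed gain
        double turn    = Range.clip(headingError * 0.01, -0.2, 0.2);  // assumed gain
        leftDrive.setPower(forward - turn);
        rightDrive.setPower(forward + turn);
    }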
New file: ConceptAprilTagEasy.java (package org.firstinspires.ftc.robotcontroller.external.samples)

@@ -0,0 +1,148 @@
/* Copyright (c) 2023 FIRST. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted (subject to the limitations in the disclaimer below) provided that
 * the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this list
 * of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice, this
 * list of conditions and the following disclaimer in the documentation and/or
 * other materials provided with the distribution.
 *
 * Neither the name of FIRST nor the names of its contributors may be used to endorse or
 * promote products derived from this software without specific prior written permission.
 *
 * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
 * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

package org.firstinspires.ftc.robotcontroller.external.samples;

import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import java.util.List;
import org.firstinspires.ftc.robotcore.external.hardware.camera.BuiltinCameraDirection;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.vision.VisionPortal;
import org.firstinspires.ftc.vision.apriltag.AprilTagDetection;
import org.firstinspires.ftc.vision.apriltag.AprilTagProcessor;

/**
 * This 2023-2024 OpMode illustrates the basics of AprilTag recognition and pose estimation, using
 * the easy way.
 *
 * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
 * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
 */
@TeleOp(name = "Concept: AprilTag Easy", group = "Concept")
@Disabled
public class ConceptAprilTagEasy extends LinearOpMode {

    private static final boolean USE_WEBCAM = true;  // true for webcam, false for phone camera

    /**
     * {@link #aprilTag} is the variable to store our instance of the AprilTag processor.
     */
    private AprilTagProcessor aprilTag;

    /**
     * {@link #visionPortal} is the variable to store our instance of the vision portal.
     */
    private VisionPortal visionPortal;

    @Override
    public void runOpMode() {

        initAprilTag();

        // Wait for the DS start button to be touched.
        telemetry.addData("DS preview on/off", "3 dots, Camera Stream");
        telemetry.addData(">", "Touch Play to start OpMode");
        telemetry.update();
        waitForStart();

        if (opModeIsActive()) {
            while (opModeIsActive()) {

                telemetryAprilTag();

                // Push telemetry to the Driver Station.
                telemetry.update();

                // Save CPU resources; can resume streaming when needed.
                if (gamepad1.dpad_down) {
                    visionPortal.stopStreaming();
                } else if (gamepad1.dpad_up) {
                    visionPortal.resumeStreaming();
                }

                // Share the CPU.
                sleep(20);
            }
        }

        // Save more CPU resources when camera is no longer needed.
        visionPortal.close();

    }   // end method runOpMode()

    /**
     * Initialize the AprilTag processor.
     */
    private void initAprilTag() {

        // Create the AprilTag processor the easy way.
        aprilTag = AprilTagProcessor.easyCreateWithDefaults();

        // Create the vision portal the easy way.
        if (USE_WEBCAM) {
            visionPortal = VisionPortal.easyCreateWithDefaults(
                hardwareMap.get(WebcamName.class, "Webcam 1"), aprilTag);
        } else {
            visionPortal = VisionPortal.easyCreateWithDefaults(
                BuiltinCameraDirection.BACK, aprilTag);
        }

    }   // end method initAprilTag()

    /**
     * Function to add telemetry about AprilTag detections.
     */
    private void telemetryAprilTag() {

        List<AprilTagDetection> currentDetections = aprilTag.getDetections();
        telemetry.addData("# AprilTags Detected", currentDetections.size());

        // Step through the list of detections and display info for each one.
        for (AprilTagDetection detection : currentDetections) {
            if (detection.metadata != null) {
                telemetry.addLine(String.format("\n==== (ID %d) %s", detection.id, detection.metadata.name));
                telemetry.addLine(String.format("XYZ %6.1f %6.1f %6.1f (inch)", detection.ftcPose.x, detection.ftcPose.y, detection.ftcPose.z));
                telemetry.addLine(String.format("PRY %6.1f %6.1f %6.1f (deg)", detection.ftcPose.pitch, detection.ftcPose.roll, detection.ftcPose.yaw));
                telemetry.addLine(String.format("RBE %6.1f %6.1f %6.1f (inch, deg, deg)", detection.ftcPose.range, detection.ftcPose.bearing, detection.ftcPose.elevation));
            } else {
                telemetry.addLine(String.format("\n==== (ID %d) Unknown", detection.id));
                telemetry.addLine(String.format("Center %6.0f %6.0f (pixels)", detection.center.x, detection.center.y));
            }
        }   // end for() loop

        // Add "key" information to telemetry
        telemetry.addLine("\nkey:\nXYZ = X (Right), Y (Forward), Z (Up) dist.");
        telemetry.addLine("PRY = Pitch, Roll & Yaw (XYZ Rotation)");
        telemetry.addLine("RBE = Range, Bearing & Elevation");

    }   // end method telemetryAprilTag()

}   // end class
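For comparison with the previous sample, easyCreateWithDefaults() collapses the Builder calls into one line. Assuming its defaults match the explicit Builder defaults (an assumption, not confirmed by this diff), the webcam branch above is roughly this long form, which other samples in this changeset use verbatim:

    // Rough long-form equivalent of the easy-way webcam branch (sketch).
    aprilTag = new AprilTagProcessor.Builder().build();
    visionPortal = new VisionPortal.Builder()
            .setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"))
            .addProcessor(aprilTag)
            .build();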
New file: ConceptAprilTagOptimizeExposure.java (package org.firstinspires.ftc.robotcontroller.external.samples)

@@ -0,0 +1,248 @@
/* Copyright (c) 2023 FIRST. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted (subject to the limitations in the disclaimer below) provided that
 * the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this list
 * of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice, this
 * list of conditions and the following disclaimer in the documentation and/or
 * other materials provided with the distribution.
 *
 * Neither the name of FIRST nor the names of its contributors may be used to endorse or
 * promote products derived from this software without specific prior written permission.
 *
 * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
 * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

package org.firstinspires.ftc.robotcontroller.external.samples;

import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.util.Range;

import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.robotcore.external.hardware.camera.controls.ExposureControl;
import org.firstinspires.ftc.robotcore.external.hardware.camera.controls.GainControl;

import org.firstinspires.ftc.vision.VisionPortal;
import org.firstinspires.ftc.vision.apriltag.AprilTagDetection;
import org.firstinspires.ftc.vision.apriltag.AprilTagProcessor;

import java.util.List;
import java.util.concurrent.TimeUnit;

/**
 * This OpMode determines the best Exposure for minimizing image motion-blur on a Webcam
 * Note that it is not possible to control the exposure for a Phone Camera, so if you are using a Phone for the Robot Controller
 * this OpMode/Feature only applies to an externally connected Webcam
 *
 * The goal is to determine the smallest (shortest) Exposure value that still provides reliable Tag Detection.
 * Starting with the minimum Exposure and maximum Gain, the exposure is slowly increased until the Tag is
 * detected reliably from the likely operational distance.
 *
 * The best way to run this optimization is to view the camera preview screen while changing the exposure and gains.
 *
 * To do this, you need to view the RobotController screen directly (not from Driver Station)
 * This can be done directly from a RC phone screen (if you are using an external Webcam), but for a Control Hub you must either plug an
 * HDMI monitor into the Control Hub HDMI port, or use an external viewer program like ScrCpy (https://scrcpy.org/)
 *
 * Use Android Studio to Copy this Class, and Paste it into the TeamCode/src/main/java/org/firstinspires/ftc/teamcode folder.
 * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
 */

@TeleOp(name="Optimize AprilTag Exposure", group = "Concept")
@Disabled
public class ConceptAprilTagOptimizeExposure extends LinearOpMode
{
    private VisionPortal visionPortal = null;   // Used to manage the video source.
    private AprilTagProcessor aprilTag;         // Used for managing the AprilTag detection process.
    private int myExposure;
    private int minExposure;
    private int maxExposure;
    private int myGain;
    private int minGain;
    private int maxGain;

    boolean thisExpUp = false;
    boolean thisExpDn = false;
    boolean thisGainUp = false;
    boolean thisGainDn = false;

    boolean lastExpUp = false;
    boolean lastExpDn = false;
    boolean lastGainUp = false;
    boolean lastGainDn = false;

    @Override public void runOpMode()
    {
        // Initialize the Apriltag Detection process
        initAprilTag();

        // Establish Min and Max Gains and Exposure. Then set a low exposure with high gain
        getCameraSetting();
        myExposure = Math.min(5, minExposure);
        myGain = maxGain;
        setManualExposure(myExposure, myGain);

        // Wait for the match to begin.
        telemetry.addData("Camera preview on/off", "3 dots, Camera Stream");
        telemetry.addData(">", "Touch Play to start OpMode");
        telemetry.update();
        waitForStart();

        while (opModeIsActive())
        {
            telemetry.addLine("Find lowest Exposure that gives reliable detection.");
            telemetry.addLine("Use Left bump/trig to adjust Exposure.");
            telemetry.addLine("Use Right bump/trig to adjust Gain.\n");

            // Display how many Tags Detected
            List<AprilTagDetection> currentDetections = aprilTag.getDetections();
            int numTags = currentDetections.size();
            if (numTags > 0)
                telemetry.addData("Tag", "####### %d Detected ######", currentDetections.size());
            else
                telemetry.addData("Tag", "----------- none - ----------");

            telemetry.addData("Exposure", "%d (%d - %d)", myExposure, minExposure, maxExposure);
            telemetry.addData("Gain", "%d (%d - %d)", myGain, minGain, maxGain);
            telemetry.update();

            // check to see if we need to change exposure or gain.
            thisExpUp = gamepad1.left_bumper;
            thisExpDn = gamepad1.left_trigger > 0.25;
            thisGainUp = gamepad1.right_bumper;
            thisGainDn = gamepad1.right_trigger > 0.25;

            // look for clicks to change exposure
            if (thisExpUp && !lastExpUp) {
                myExposure = Range.clip(myExposure + 1, minExposure, maxExposure);
                setManualExposure(myExposure, myGain);
            } else if (thisExpDn && !lastExpDn) {
                myExposure = Range.clip(myExposure - 1, minExposure, maxExposure);
                setManualExposure(myExposure, myGain);
            }

            // look for clicks to change the gain
            if (thisGainUp && !lastGainUp) {
                myGain = Range.clip(myGain + 1, minGain, maxGain);
                setManualExposure(myExposure, myGain);
            } else if (thisGainDn && !lastGainDn) {
                myGain = Range.clip(myGain - 1, minGain, maxGain);
                setManualExposure(myExposure, myGain);
            }

            lastExpUp = thisExpUp;
            lastExpDn = thisExpDn;
            lastGainUp = thisGainUp;
            lastGainDn = thisGainDn;

            sleep(20);
        }
    }

    /**
     * Initialize the AprilTag processor.
     */
    private void initAprilTag() {
        // Create the AprilTag processor by using a builder.
        aprilTag = new AprilTagProcessor.Builder().build();

        // Create the WEBCAM vision portal by using a builder.
        visionPortal = new VisionPortal.Builder()
                .setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"))
                .addProcessor(aprilTag)
                .build();
    }

    /*
        Manually set the camera gain and exposure.
        Can only be called AFTER calling initAprilTag();
        Returns true if controls are set.
     */
    private boolean setManualExposure(int exposureMS, int gain) {
        // Ensure Vision Portal has been setup.
        if (visionPortal == null) {
            return false;
        }

        // Wait for the camera to be open
        if (visionPortal.getCameraState() != VisionPortal.CameraState.STREAMING) {
            telemetry.addData("Camera", "Waiting");
            telemetry.update();
            while (!isStopRequested() && (visionPortal.getCameraState() != VisionPortal.CameraState.STREAMING)) {
                sleep(20);
            }
            telemetry.addData("Camera", "Ready");
            telemetry.update();
        }

        // Set camera controls unless we are stopping.
        if (!isStopRequested())
        {
            // Set exposure. Make sure we are in Manual Mode for these values to take effect.
            ExposureControl exposureControl = visionPortal.getCameraControl(ExposureControl.class);
            if (exposureControl.getMode() != ExposureControl.Mode.Manual) {
                exposureControl.setMode(ExposureControl.Mode.Manual);
                sleep(50);
            }
            exposureControl.setExposure((long)exposureMS, TimeUnit.MILLISECONDS);
            sleep(20);

            // Set Gain.
            GainControl gainControl = visionPortal.getCameraControl(GainControl.class);
            gainControl.setGain(gain);
            sleep(20);
            return (true);
        } else {
            return (false);
        }
    }

    /*
        Read this camera's minimum and maximum Exposure and Gain settings.
        Can only be called AFTER calling initAprilTag();
     */
    private void getCameraSetting() {
        // Ensure Vision Portal has been setup.
        if (visionPortal == null) {
            return;
        }

        // Wait for the camera to be open
        if (visionPortal.getCameraState() != VisionPortal.CameraState.STREAMING) {
            telemetry.addData("Camera", "Waiting");
            telemetry.update();
            while (!isStopRequested() && (visionPortal.getCameraState() != VisionPortal.CameraState.STREAMING)) {
                sleep(20);
            }
            telemetry.addData("Camera", "Ready");
            telemetry.update();
        }

        // Get camera control values unless we are stopping.
        if (!isStopRequested()) {
            ExposureControl exposureControl = visionPortal.getCameraControl(ExposureControl.class);
            minExposure = (int)exposureControl.getMinExposure(TimeUnit.MILLISECONDS) + 1;
            maxExposure = (int)exposureControl.getMaxExposure(TimeUnit.MILLISECONDS);

            GainControl gainControl = visionPortal.getCameraControl(GainControl.class);
            minGain = gainControl.getMinGain();
            maxGain = gainControl.getMaxGain();
        }
    }
}
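The thisExpUp/lastExpUp pairs above implement rising-edge ("click") detection, so a held bumper steps the value only once. When an OpMode tracks many buttons, that bookkeeping is worth factoring out. A small hypothetical helper, not part of the SDK or this commit:

    // Hypothetical helper: fires once per press, not continuously while held.
    class ButtonEdge {
        private boolean last = false;
        boolean pressed(boolean current) {
            boolean edge = current && !last;   // true only on the release->press transition
            last = current;
            return edge;
        }
    }

    // Usage sketch: if (expUp.pressed(gamepad1.left_bumper)) { /* step exposure up */ }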
New file: ConceptAprilTagSwitchableCameras.java (package org.firstinspires.ftc.robotcontroller.external.samples)

@@ -0,0 +1,195 @@
/* Copyright (c) 2023 FIRST. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted (subject to the limitations in the disclaimer below) provided that
 * the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this list
 * of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice, this
 * list of conditions and the following disclaimer in the documentation and/or
 * other materials provided with the distribution.
 *
 * Neither the name of FIRST nor the names of its contributors may be used to endorse or
 * promote products derived from this software without specific prior written permission.
 *
 * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
 * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

package org.firstinspires.ftc.robotcontroller.external.samples;

import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import java.util.List;
import org.firstinspires.ftc.robotcore.external.ClassFactory;
import org.firstinspires.ftc.robotcore.external.hardware.camera.CameraName;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.vision.VisionPortal;
import org.firstinspires.ftc.vision.VisionPortal.CameraState;
import org.firstinspires.ftc.vision.apriltag.AprilTagDetection;
import org.firstinspires.ftc.vision.apriltag.AprilTagProcessor;

/**
 * This 2023-2024 OpMode illustrates the basics of AprilTag recognition and pose estimation, using
 * two webcams.
 *
 * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
 * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
 */
@TeleOp(name = "Concept: AprilTag Switchable Cameras", group = "Concept")
@Disabled
public class ConceptAprilTagSwitchableCameras extends LinearOpMode {

    /**
     * Variables used for switching cameras.
     */
    private WebcamName webcam1, webcam2;
    private boolean oldLeftBumper;
    private boolean oldRightBumper;

    /**
     * {@link #aprilTag} is the variable to store our instance of the AprilTag processor.
     */
    private AprilTagProcessor aprilTag;

    /**
     * {@link #visionPortal} is the variable to store our instance of the vision portal.
     */
    private VisionPortal visionPortal;

    @Override
    public void runOpMode() {

        initAprilTag();

        // Wait for the DS start button to be touched.
        telemetry.addData("DS preview on/off", "3 dots, Camera Stream");
        telemetry.addData(">", "Touch Play to start OpMode");
        telemetry.update();
        waitForStart();

        if (opModeIsActive()) {
            while (opModeIsActive()) {

                telemetryCameraSwitching();
                telemetryAprilTag();

                // Push telemetry to the Driver Station.
                telemetry.update();

                // Save CPU resources; can resume streaming when needed.
                if (gamepad1.dpad_down) {
                    visionPortal.stopStreaming();
                } else if (gamepad1.dpad_up) {
                    visionPortal.resumeStreaming();
                }

                doCameraSwitching();

                // Share the CPU.
                sleep(20);
            }
        }

        // Save more CPU resources when camera is no longer needed.
        visionPortal.close();

    }   // end runOpMode()

    /**
     * Initialize the AprilTag processor.
     */
    private void initAprilTag() {

        // Create the AprilTag processor by using a builder.
        aprilTag = new AprilTagProcessor.Builder().build();

        webcam1 = hardwareMap.get(WebcamName.class, "Webcam 1");
        webcam2 = hardwareMap.get(WebcamName.class, "Webcam 2");
        CameraName switchableCamera = ClassFactory.getInstance()
            .getCameraManager().nameForSwitchableCamera(webcam1, webcam2);

        // Create the vision portal by using a builder.
        visionPortal = new VisionPortal.Builder()
            .setCamera(switchableCamera)
            .addProcessor(aprilTag)
            .build();

    }   // end method initAprilTag()

    /**
     * Function to add telemetry about camera switching.
     */
    private void telemetryCameraSwitching() {

        if (visionPortal.getActiveCamera().equals(webcam1)) {
            telemetry.addData("activeCamera", "Webcam 1");
            telemetry.addData("Press RightBumper", "to switch to Webcam 2");
        } else {
            telemetry.addData("activeCamera", "Webcam 2");
            telemetry.addData("Press LeftBumper", "to switch to Webcam 1");
        }

    }   // end method telemetryCameraSwitching()

    /**
     * Function to add telemetry about AprilTag detections.
     */
    private void telemetryAprilTag() {

        List<AprilTagDetection> currentDetections = aprilTag.getDetections();
        telemetry.addData("# AprilTags Detected", currentDetections.size());

        // Step through the list of detections and display info for each one.
        for (AprilTagDetection detection : currentDetections) {
            if (detection.metadata != null) {
                telemetry.addLine(String.format("\n==== (ID %d) %s", detection.id, detection.metadata.name));
                telemetry.addLine(String.format("XYZ %6.1f %6.1f %6.1f (inch)", detection.ftcPose.x, detection.ftcPose.y, detection.ftcPose.z));
                telemetry.addLine(String.format("PRY %6.1f %6.1f %6.1f (deg)", detection.ftcPose.pitch, detection.ftcPose.roll, detection.ftcPose.yaw));
                telemetry.addLine(String.format("RBE %6.1f %6.1f %6.1f (inch, deg, deg)", detection.ftcPose.range, detection.ftcPose.bearing, detection.ftcPose.elevation));
            } else {
                telemetry.addLine(String.format("\n==== (ID %d) Unknown", detection.id));
                telemetry.addLine(String.format("Center %6.0f %6.0f (pixels)", detection.center.x, detection.center.y));
            }
        }   // end for() loop

        // Add "key" information to telemetry
        telemetry.addLine("\nkey:\nXYZ = X (Right), Y (Forward), Z (Up) dist.");
        telemetry.addLine("PRY = Pitch, Roll & Yaw (XYZ Rotation)");
        telemetry.addLine("RBE = Range, Bearing & Elevation");

    }   // end method telemetryAprilTag()

    /**
     * Function to set the active camera according to input from the gamepad.
     */
    private void doCameraSwitching() {
        if (visionPortal.getCameraState() == CameraState.STREAMING) {
            // If the left bumper is pressed, use Webcam 1.
            // If the right bumper is pressed, use Webcam 2.
            boolean newLeftBumper = gamepad1.left_bumper;
            boolean newRightBumper = gamepad1.right_bumper;
            if (newLeftBumper && !oldLeftBumper) {
                visionPortal.setActiveCamera(webcam1);
            } else if (newRightBumper && !oldRightBumper) {
                visionPortal.setActiveCamera(webcam2);
            }
            oldLeftBumper = newLeftBumper;
            oldRightBumper = newRightBumper;
        }

    }   // end method doCameraSwitching()

}   // end class
New file: ConceptDoubleVision.java (package org.firstinspires.ftc.robotcontroller.external.samples)

@@ -0,0 +1,201 @@
/* Copyright (c) 2023 FIRST. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted (subject to the limitations in the disclaimer below) provided that
 * the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this list
 * of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice, this
 * list of conditions and the following disclaimer in the documentation and/or
 * other materials provided with the distribution.
 *
 * Neither the name of FIRST nor the names of its contributors may be used to endorse or
 * promote products derived from this software without specific prior written permission.
 *
 * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
 * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

package org.firstinspires.ftc.robotcontroller.external.samples;

import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import java.util.List;
import org.firstinspires.ftc.robotcore.external.hardware.camera.BuiltinCameraDirection;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.robotcore.external.tfod.Recognition;
import org.firstinspires.ftc.vision.VisionPortal;
import org.firstinspires.ftc.vision.apriltag.AprilTagDetection;
import org.firstinspires.ftc.vision.apriltag.AprilTagProcessor;
import org.firstinspires.ftc.vision.tfod.TfodProcessor;

/**
 * This 2023-2024 OpMode illustrates the basics of using both AprilTag recognition and TensorFlow
 * Object Detection.
 *
 * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
 * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
 */
@TeleOp(name = "Concept: Double Vision", group = "Concept")
@Disabled
public class ConceptDoubleVision extends LinearOpMode {
    private static final boolean USE_WEBCAM = true;  // true for webcam, false for phone camera

    /**
     * {@link #aprilTag} is the variable to store our instance of the AprilTag processor.
     */
    private AprilTagProcessor aprilTag;

    /**
     * {@link #tfod} is the variable to store our instance of the TensorFlow Object Detection processor.
     */
    private TfodProcessor tfod;

    /**
     * {@link #myVisionPortal} is the variable to store our instance of the vision portal.
     */
    private VisionPortal myVisionPortal;

    @Override
    public void runOpMode() {
        initDoubleVision();

        // This OpMode loops continuously, allowing the user to switch between
        // AprilTag and TensorFlow Object Detection (TFOD) image processors.
        while (!isStopRequested()) {

            if (opModeInInit()) {
                telemetry.addData("DS preview on/off","3 dots, Camera Stream");
                telemetry.addLine();
                telemetry.addLine("----------------------------------------");
            }

            if (myVisionPortal.getProcessorEnabled(aprilTag)) {
                // User instructions: Dpad left or Dpad right.
                telemetry.addLine("Dpad Left to disable AprilTag");
                telemetry.addLine();
                telemetryAprilTag();
            } else {
                telemetry.addLine("Dpad Right to enable AprilTag");
            }
            telemetry.addLine();
            telemetry.addLine("----------------------------------------");
            if (myVisionPortal.getProcessorEnabled(tfod)) {
                telemetry.addLine("Dpad Down to disable TFOD");
                telemetry.addLine();
                telemetryTfod();
            } else {
                telemetry.addLine("Dpad Up to enable TFOD");
            }

            // Push telemetry to the Driver Station.
            telemetry.update();

            if (gamepad1.dpad_left) {
                myVisionPortal.setProcessorEnabled(aprilTag, false);
            } else if (gamepad1.dpad_right) {
                myVisionPortal.setProcessorEnabled(aprilTag, true);
            }
            if (gamepad1.dpad_down) {
                myVisionPortal.setProcessorEnabled(tfod, false);
            } else if (gamepad1.dpad_up) {
                myVisionPortal.setProcessorEnabled(tfod, true);
            }

            sleep(20);

        }   // end while loop

    }   // end method runOpMode()


    /**
     * Function to initialize AprilTag and TFOD.
     */
    private void initDoubleVision() {
        // -----------------------------------------------------------------------------------------
        // AprilTag Configuration
        // -----------------------------------------------------------------------------------------

        aprilTag = new AprilTagProcessor.Builder()
            .build();

        // -----------------------------------------------------------------------------------------
        // TFOD Configuration
        // -----------------------------------------------------------------------------------------

        tfod = new TfodProcessor.Builder()
            .build();

        // -----------------------------------------------------------------------------------------
        // Camera Configuration
        // -----------------------------------------------------------------------------------------

        if (USE_WEBCAM) {
            myVisionPortal = new VisionPortal.Builder()
                .setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"))
                .addProcessors(tfod, aprilTag)
                .build();
        } else {
            myVisionPortal = new VisionPortal.Builder()
                .setCamera(BuiltinCameraDirection.BACK)
                .addProcessors(tfod, aprilTag)
                .build();
        }
    }   // end initDoubleVision()

    /**
     * Function to add telemetry about AprilTag detections.
     */
    private void telemetryAprilTag() {
        List<AprilTagDetection> currentDetections = aprilTag.getDetections();
        telemetry.addData("# AprilTags Detected", currentDetections.size());

        // Step through the list of detections and display info for each one.
        for (AprilTagDetection detection : currentDetections) {
            if (detection.metadata != null) {
                telemetry.addLine(String.format("\n==== (ID %d) %s", detection.id, detection.metadata.name));
                telemetry.addLine(String.format("XYZ %6.1f %6.1f %6.1f (inch)", detection.ftcPose.x, detection.ftcPose.y, detection.ftcPose.z));
                telemetry.addLine(String.format("PRY %6.1f %6.1f %6.1f (deg)", detection.ftcPose.pitch, detection.ftcPose.roll, detection.ftcPose.yaw));
                telemetry.addLine(String.format("RBE %6.1f %6.1f %6.1f (inch, deg, deg)", detection.ftcPose.range, detection.ftcPose.bearing, detection.ftcPose.elevation));
            } else {
                telemetry.addLine(String.format("\n==== (ID %d) Unknown", detection.id));
                telemetry.addLine(String.format("Center %6.0f %6.0f (pixels)", detection.center.x, detection.center.y));
            }
        }   // end for() loop

    }   // end method telemetryAprilTag()

    /**
     * Function to add telemetry about TensorFlow Object Detection (TFOD) recognitions.
     */
    private void telemetryTfod() {
        List<Recognition> currentRecognitions = tfod.getRecognitions();
        telemetry.addData("# Objects Detected", currentRecognitions.size());

        // Step through the list of recognitions and display info for each one.
        for (Recognition recognition : currentRecognitions) {
            double x = (recognition.getLeft() + recognition.getRight()) / 2;
            double y = (recognition.getTop() + recognition.getBottom()) / 2;

            telemetry.addData(""," ");
            telemetry.addData("Image", "%s (%.0f %% Conf.)", recognition.getLabel(), recognition.getConfidence() * 100);
            telemetry.addData("- Position", "%.0f / %.0f", x, y);
            telemetry.addData("- Size", "%.0f x %.0f", recognition.getWidth(), recognition.getHeight());
        }   // end for() loop

    }   // end method telemetryTfod()

}   // end class
@ -0,0 +1,185 @@
|
||||
/*
|
||||
Copyright (c) 2022 REV Robotics, FIRST
|
||||
|
||||
All rights reserved.
|
||||
|
||||
Redistribution and use in source and binary forms, with or without modification,
|
||||
are permitted (subject to the limitations in the disclaimer below) provided that
|
||||
the following conditions are met:
|
||||
|
||||
Redistributions of source code must retain the above copyright notice, this list
|
||||
of conditions and the following disclaimer.
|
||||
|
||||
Redistributions in binary form must reproduce the above copyright notice, this
|
||||
list of conditions and the following disclaimer in the documentation and/or
|
||||
other materials provided with the distribution.
|
||||
|
||||
Neither the name of REV Robotics nor the names of its contributors may be used to
|
||||
endorse or promote products derived from this software without specific prior
|
||||
written permission.
|
||||
|
||||
NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
|
||||
LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
|
||||
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
|
||||
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
|
||||
TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
|
||||
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
*/
|
||||
package org.firstinspires.ftc.robotcontroller.external.samples;
|
||||
|
||||
import com.qualcomm.hardware.rev.RevHubOrientationOnRobot;
|
||||
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
|
||||
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
|
||||
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
|
||||
import com.qualcomm.robotcore.hardware.IMU;
|
||||
|
||||
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
|
||||
import org.firstinspires.ftc.robotcore.external.navigation.AngularVelocity;
|
||||
import org.firstinspires.ftc.robotcore.external.navigation.YawPitchRollAngles;
|
||||
|
||||
/**
|
||||
* This file demonstrates the impact of setting the IMU orientation correctly or incorrectly. This
|
||||
* code assumes there is an IMU configured with the name "imu".
|
||||
* <p>
|
||||
* Note: This OpMode is more of a tool than a code sample. The User Interface portion of this code
|
||||
* goes beyond simply showing how to interface to the IMU.<br>
|
||||
* For a minimal example of interfacing to an IMU, please see the SensorIMUOrthogonal or SensorIMUNonOrthogonal sample OpModes.
|
||||
* <p>
|
||||
* This sample enables you to re-specify the Hub Mounting orientation dynamically by using gamepad controls.
|
||||
* While doing so, the sample will display how Pitch, Roll and Yaw angles change as the hub is moved.
|
||||
* <p>
|
||||
* The gamepad controls let you change the two parameters that specify how the Control/Expansion Hub is mounted. <br>
|
||||
* The first parameter specifies which direction the printed logo on the Hub is pointing. <br>
|
||||
* The second parameter specifies which direction the USB connector on the Hub is pointing. <br>
|
||||
* All directions are relative to the robot, and left/right is as viewed from behind the robot.
|
||||
* <p>
|
||||
* How will you know if you have chosen the correct Orientation? With the correct orientation
|
||||
* parameters selected, pitch/roll/yaw should act as follows:
|
||||
* <p>
|
||||
 * Pitch value should INCREASE as the robot is tipped UP at the front. (Rotation about X) <br>
 * Roll value should INCREASE as the robot is tipped UP at the left side. (Rotation about Y) <br>
 * Yaw value should INCREASE as the robot is rotated Counter Clockwise. (Rotation about Z) <br>
 * <p>
 * The Yaw can be reset (to zero) by pressing the Y button on the gamepad (Triangle on a PS4 controller)
 * <p>
 * The rotational velocities should follow the change in corresponding axes.
 */

@TeleOp(name="Concept: IMU Orientation", group="Concept")
@Disabled
public class ConceptExploringIMUOrientation extends LinearOpMode {
    static RevHubOrientationOnRobot.LogoFacingDirection[] logoFacingDirections
            = RevHubOrientationOnRobot.LogoFacingDirection.values();
    static RevHubOrientationOnRobot.UsbFacingDirection[] usbFacingDirections
            = RevHubOrientationOnRobot.UsbFacingDirection.values();
    static int LAST_DIRECTION = logoFacingDirections.length - 1;
    static float TRIGGER_THRESHOLD = 0.2f;

    IMU imu;
    int logoFacingDirectionPosition;
    int usbFacingDirectionPosition;
    boolean orientationIsValid = true;

    @Override public void runOpMode() throws InterruptedException {
        imu = hardwareMap.get(IMU.class, "imu");
        logoFacingDirectionPosition = 0; // Up
        usbFacingDirectionPosition = 2; // Forward

        updateOrientation();

        boolean justChangedLogoDirection = false;
        boolean justChangedUsbDirection = false;

        // Loop until stop is requested
        while (!isStopRequested()) {

            // Check to see if a Yaw reset is requested (Y button)
            if (gamepad1.y) {
                telemetry.addData("Yaw", "Resetting\n");
                imu.resetYaw();
            } else {
                telemetry.addData("Yaw", "Press Y (triangle) on Gamepad to reset.\n");
            }

            // Check to see if a new Logo Direction is requested
            if (gamepad1.left_bumper || gamepad1.right_bumper) {
                if (!justChangedLogoDirection) {
                    justChangedLogoDirection = true;
                    if (gamepad1.left_bumper) {
                        logoFacingDirectionPosition--;
                        if (logoFacingDirectionPosition < 0) {
                            logoFacingDirectionPosition = LAST_DIRECTION;
                        }
                    } else {
                        logoFacingDirectionPosition++;
                        if (logoFacingDirectionPosition > LAST_DIRECTION) {
                            logoFacingDirectionPosition = 0;
                        }
                    }
                    updateOrientation();
                }
            } else {
                justChangedLogoDirection = false;
            }

            // Check to see if a new USB Direction is requested
            if (gamepad1.left_trigger > TRIGGER_THRESHOLD || gamepad1.right_trigger > TRIGGER_THRESHOLD) {
                if (!justChangedUsbDirection) {
                    justChangedUsbDirection = true;
                    if (gamepad1.left_trigger > TRIGGER_THRESHOLD) {
                        usbFacingDirectionPosition--;
                        if (usbFacingDirectionPosition < 0) {
                            usbFacingDirectionPosition = LAST_DIRECTION;
                        }
                    } else {
                        usbFacingDirectionPosition++;
                        if (usbFacingDirectionPosition > LAST_DIRECTION) {
                            usbFacingDirectionPosition = 0;
                        }
                    }
                    updateOrientation();
                }
            } else {
                justChangedUsbDirection = false;
            }

            // Display user instructions and IMU data
            telemetry.addData("logo Direction (set with bumpers)", logoFacingDirections[logoFacingDirectionPosition]);
            telemetry.addData("usb Direction (set with triggers)", usbFacingDirections[usbFacingDirectionPosition] + "\n");

            if (orientationIsValid) {
                YawPitchRollAngles orientation = imu.getRobotYawPitchRollAngles();
                AngularVelocity angularVelocity = imu.getRobotAngularVelocity(AngleUnit.DEGREES);

                telemetry.addData("Yaw (Z)", "%.2f Deg. (Heading)", orientation.getYaw(AngleUnit.DEGREES));
                telemetry.addData("Pitch (X)", "%.2f Deg.", orientation.getPitch(AngleUnit.DEGREES));
                telemetry.addData("Roll (Y)", "%.2f Deg.\n", orientation.getRoll(AngleUnit.DEGREES));
                telemetry.addData("Yaw (Z) velocity", "%.2f Deg/Sec", angularVelocity.zRotationRate);
                telemetry.addData("Pitch (X) velocity", "%.2f Deg/Sec", angularVelocity.xRotationRate);
                telemetry.addData("Roll (Y) velocity", "%.2f Deg/Sec", angularVelocity.yRotationRate);
            } else {
                telemetry.addData("Error", "Selected orientation on robot is invalid");
            }

            telemetry.update();
        }
    }

    // Apply any requested orientation changes.
    void updateOrientation() {
        RevHubOrientationOnRobot.LogoFacingDirection logo = logoFacingDirections[logoFacingDirectionPosition];
        RevHubOrientationOnRobot.UsbFacingDirection usb = usbFacingDirections[usbFacingDirectionPosition];
        try {
            RevHubOrientationOnRobot orientationOnRobot = new RevHubOrientationOnRobot(logo, usb);
            imu.initialize(new IMU.Parameters(orientationOnRobot));
            orientationIsValid = true;
        } catch (IllegalArgumentException e) {
            orientationIsValid = false;
        }
    }
}
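For reference: once a team knows how its REV Hub is actually mounted, a competition OpMode would normally initialize the IMU once with a fixed orientation instead of cycling through the options as this sample does. A minimal sketch, assuming a logo-up, USB-forward mounting and the same "imu" configuration name used above:

import com.qualcomm.hardware.rev.RevHubOrientationOnRobot;
import com.qualcomm.robotcore.hardware.IMU;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;

// Inside runOpMode(), before waitForStart(). The two facing directions are
// assumptions; substitute your robot's actual hub mounting.
IMU imu = hardwareMap.get(IMU.class, "imu");
imu.initialize(new IMU.Parameters(new RevHubOrientationOnRobot(
        RevHubOrientationOnRobot.LogoFacingDirection.UP,
        RevHubOrientationOnRobot.UsbFacingDirection.FORWARD)));
imu.resetYaw();   // establish the current heading as zero

// Later, read the heading whenever the control loop needs it.
double heading = imu.getRobotYawPitchRollAngles().getYaw(AngleUnit.DEGREES);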
@ -0,0 +1,142 @@
/* Copyright (c) 2022 FIRST. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted (subject to the limitations in the disclaimer below) provided that
 * the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this list
 * of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice, this
 * list of conditions and the following disclaimer in the documentation and/or
 * other materials provided with the distribution.
 *
 * Neither the name of FIRST nor the names of its contributors may be used to endorse or
 * promote products derived from this software without specific prior written permission.
 *
 * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
 * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

package org.firstinspires.ftc.robotcontroller.external.samples;

import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.util.Range;

/**
 * This OpMode Sample illustrates how to use an external "hardware" class to modularize all the robot's sensors and actuators.
 * This approach is very efficient because the same hardware class can be used by all of your teleop and autonomous OpModes
 * without requiring many copy & paste operations. Once you have defined and tested the hardware class with one OpMode,
 * it is instantly available to other OpModes.
 *
 * The real benefit of this approach is that as you tweak your robot hardware, you only need to make changes in ONE place (the Hardware Class).
 * So, to be effective, you should put as much of your hardware setup and access code as possible in the hardware class.
 * Essentially, anything you do with hardware in BOTH TeleOp and Auto should likely go in the hardware class.
 *
 * The Hardware Class is created in a separate file, and then an "instance" of this class is created in each OpMode.
 * In order for the class to do typical OpMode things (like send telemetry data) it must be passed a reference to the
 * OpMode object when it's created, so it can access all core OpMode functions. This is illustrated below.
 *
 * In this concept sample, the hardware class file is called RobotHardware.java and it must accompany this sample OpMode.
 * So, if you copy ConceptExternalHardwareClass.java into TeamCode (using Android Studio or OnBotJava) then RobotHardware.java
 * must also be copied to the same location (maintaining its name).
 *
 * For comparison purposes, this sample and its accompanying hardware class duplicate the functionality of the
 * RobotTelopPOV_Linear opmode. It assumes three motors (left_drive, right_drive and arm) and two servos (left_hand and right_hand).
 *
 * View the RobotHardware.java class file for more details.
 *
 * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
 * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
 *
 * In OnBot Java, add a new OpMode, drawing from this Sample; select TeleOp.
 * Also add another new file named RobotHardware.java, drawing from the Sample with that name; select Not an OpMode.
 */

@TeleOp(name="Concept: Robot Hardware Class", group="Robot")
@Disabled
public class ConceptExternalHardwareClass extends LinearOpMode {

    // Create a RobotHardware object to be used to access robot hardware.
    // Prefix any hardware functions with "robot." to access this class.
    RobotHardware robot = new RobotHardware(this);

    @Override
    public void runOpMode() {
        double drive = 0;
        double turn = 0;
        double arm = 0;
        double handOffset = 0;

        // Initialize all the hardware, using the hardware class. See how clean and simple this is?
        robot.init();

        // Send a telemetry message to signify the robot is waiting;
        // wait for the game to start (driver presses PLAY).
        waitForStart();

        // Run until the end of the match (driver presses STOP)
        while (opModeIsActive()) {

            // Run wheels in POV mode (note: the joystick goes negative when pushed forward, so negate it).
            // In this mode the Left stick moves the robot fwd and back, the Right stick turns left and right.
            // This way it's also easy to just drive straight, or just turn.
            drive = -gamepad1.left_stick_y;
            turn  =  gamepad1.right_stick_x;

            // Combine drive and turn for blended motion. Use the RobotHardware class.
            robot.driveRobot(drive, turn);

            // Use the gamepad left & right bumpers to open and close the claw.
            // Use the SERVO constants defined in the RobotHardware class.
            // Each time around the loop, the servos will move by a small amount.
            // Limit the total offset to half of the full travel range.
            if (gamepad1.right_bumper)
                handOffset += robot.HAND_SPEED;
            else if (gamepad1.left_bumper)
                handOffset -= robot.HAND_SPEED;
            handOffset = Range.clip(handOffset, -0.5, 0.5);

            // Move both servos to the new position. Use the RobotHardware class.
            robot.setHandPositions(handOffset);

            // Use gamepad buttons to move the arm up (Y) and down (A).
            // Use the MOTOR constants defined in the RobotHardware class.
            if (gamepad1.y)
                arm = robot.ARM_UP_POWER;
            else if (gamepad1.a)
                arm = robot.ARM_DOWN_POWER;
            else
                arm = 0;

            robot.setArmPower(arm);

            // Send telemetry messages to explain the controls and show robot status.
            telemetry.addData("Drive", "Left Stick");
            telemetry.addData("Turn", "Right Stick");
            telemetry.addData("Arm Up/Down", "Y & A Buttons");
            telemetry.addData("Hand Open/Closed", "Left and Right Bumpers");
            telemetry.addData("-", "-------");

            telemetry.addData("Drive Power", "%.2f", drive);
            telemetry.addData("Turn Power", "%.2f", turn);
            telemetry.addData("Arm Power", "%.2f", arm);
            telemetry.addData("Hand Position", "Offset = %.2f", handOffset);
            telemetry.update();

            // Pace this loop so the hands move at a reasonable speed.
            sleep(50);
        }
    }
}
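The RobotHardware class itself is not included in this diff. As a rough sketch of the shape such a class takes (the member names, constant values, and motor directions below are assumptions inferred from the calls above, not the actual sample file):

import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.hardware.Servo;
import com.qualcomm.robotcore.util.Range;

public class RobotHardware {

    public static final double HAND_SPEED     = 0.02;  // servo step per loop (assumed value)
    public static final double ARM_UP_POWER   = 0.45;  // assumed value
    public static final double ARM_DOWN_POWER = -0.45; // assumed value
    static final double MID_SERVO = 0.5;

    private final LinearOpMode myOpMode;   // gives access to hardwareMap and telemetry
    private DcMotor leftDrive, rightDrive, armMotor;
    private Servo leftHand, rightHand;

    public RobotHardware(LinearOpMode opMode) {
        myOpMode = opMode;
    }

    // Map and configure all devices; call from runOpMode() before waitForStart().
    public void init() {
        leftDrive  = myOpMode.hardwareMap.get(DcMotor.class, "left_drive");
        rightDrive = myOpMode.hardwareMap.get(DcMotor.class, "right_drive");
        armMotor   = myOpMode.hardwareMap.get(DcMotor.class, "arm");
        leftHand   = myOpMode.hardwareMap.get(Servo.class, "left_hand");
        rightHand  = myOpMode.hardwareMap.get(Servo.class, "right_hand");
        leftDrive.setDirection(DcMotor.Direction.REVERSE);  // depends on your drivetrain
        rightDrive.setDirection(DcMotor.Direction.FORWARD);
        setHandPositions(0);
    }

    // Blend drive and turn, clip to the valid power range, and send to the wheels.
    public void driveRobot(double drive, double turn) {
        leftDrive.setPower(Range.clip(drive + turn, -1.0, 1.0));
        rightDrive.setPower(Range.clip(drive - turn, -1.0, 1.0));
    }

    public void setArmPower(double power) {
        armMotor.setPower(power);
    }

    // Move both hand servos symmetrically around the mid position.
    public void setHandPositions(double offset) {
        offset = Range.clip(offset, -0.5, 0.5);
        leftHand.setPosition(MID_SERVO + offset);
        rightHand.setPosition(MID_SERVO - offset);
    }
}

Because the OpMode holds only a RobotHardware reference, swapping motors or remapping configuration names later means editing this one file rather than every OpMode.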
@ -46,21 +46,28 @@ import java.util.List;
 Three scenarios are tested:
 Cache Mode = OFF     This is the normal default, where no cache is used, and every read produces a discrete transaction with
                      an expansion hub, which is the slowest approach.
                      an expansion hub, which is the slowest approach, but guarantees that the value is as fresh (recent) as possible.

 Cache Mode = AUTO    This mode will attempt to minimize the number of discrete read commands, by performing bulk-reads
                      and then returning values that have been cached. The cache is updated automatically whenever a specific read operation is repeated.
                      This mode will always return fresh data, but it may perform more bulk-reads than absolutely required.
                      Extra reads will be performed if multiple identical encoder/velocity reads are performed in one control cycle.
                      and then returning values that have been cached. The cache is updated automatically whenever any specific encoder is re-read.
                      This mode will always return new data, but it may perform more bulk-reads than absolutely required.
                      Extra reads will be performed if multiple encoder/velocity reads are performed on the same encoder in one control cycle.
                      This mode is a good compromise between the OFF and MANUAL modes.
 Cache Mode = MANUAL  This mode enables the user's code to determine the best time to refresh the cached bulk-read data.
                      Well organized code can place all the sensor reads in one location, and then just reset the cache once per control cycle.
                      This approach will produce the shortest cycle times, but it does require the user to manually clear the cache.
                      Note: If there are significant user-program delays between encoder reads, the cached value may not be fresh (recent).
                      You can issue a clearBulkCache() call at any time to force a fresh bulk-read on the next encoder read.

 Cache Mode = MANUAL  This mode requires the user's code to determine the best time to clear the cached bulk-read data.
                      Well organized code will reset the cache once at the beginning of the control cycle, and then immediately read and store all the encoder values.
                      This approach will produce the shortest cycle times, but it does require the user to manually clear the cache.
                      Since NO automatic bulk-reads are performed, neglecting to clear the bulk cache will result in the same values being returned
                      each time an encoder read is performed.

 -------------------------------------

 General tip to speed up your control cycles:

 No matter what method you use to read encoders and other inputs, you should try to
 avoid reading the same input multiple times around a control loop.
 avoid reading the same encoder input multiple times around a control loop.
 Under normal conditions, this will slow down the control loop.
 The preferred method is to read all the required inputs ONCE at the beginning of the loop,
 and save the values in variables that can be used by other parts of the control code.
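As a rough sketch of the MANUAL pattern described above: the hub calls are the standard LynxModule bulk-caching API, while the motor names are assumptions standing in for whatever devices your loop actually reads.

import com.qualcomm.hardware.lynx.LynxModule;
import java.util.List;

// Inside a LinearOpMode's runOpMode(). Put every hub into MANUAL bulk-caching
// mode once, before the control loop starts.
List<LynxModule> allHubs = hardwareMap.getAll(LynxModule.class);
for (LynxModule hub : allHubs) {
    hub.setBulkCachingMode(LynxModule.BulkCachingMode.MANUAL);
}

while (opModeIsActive()) {
    // Clear the cache once at the top of each cycle; the first reads that follow
    // then trigger exactly one bulk-read per hub.
    for (LynxModule hub : allHubs) {
        hub.clearBulkCache();
    }

    // Read every input ONCE and save it in variables for the rest of the loop.
    // (leftDrive/rightDrive are assumed to have been mapped earlier.)
    int leftPos  = leftDrive.getCurrentPosition();
    int rightPos = rightDrive.getCurrentPosition();

    // ... control logic uses leftPos/rightPos, never re-reading the encoders ...
}

Forgetting the clearBulkCache() call in MANUAL mode is the classic failure: the loop keeps seeing the stale first reading, exactly as the note above warns.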
@ -33,154 +33,149 @@ import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import java.util.List;
import org.firstinspires.ftc.robotcore.external.ClassFactory;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer.CameraDirection;
import org.firstinspires.ftc.robotcore.external.tfod.TFObjectDetector;
import org.firstinspires.ftc.robotcore.external.hardware.camera.BuiltinCameraDirection;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.robotcore.external.tfod.Recognition;
import org.firstinspires.ftc.vision.VisionPortal;
import org.firstinspires.ftc.vision.tfod.TfodProcessor;

/**
 * This 2020-2021 OpMode illustrates the basics of using the TensorFlow Object Detection API to
 * determine the position of the Freight Frenzy game elements.
 * This 2023-2024 OpMode illustrates the basics of TensorFlow Object Detection,
 * including Java Builder structures for specifying Vision parameters.
 *
 * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
 * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
 *
 * IMPORTANT: In order to use this OpMode, you need to obtain your own Vuforia license key as
 * is explained below.
 */
@TeleOp(name = "Concept: TensorFlow Object Detection", group = "Concept")
@Disabled
public class ConceptTensorFlowObjectDetection extends LinearOpMode {
    /* Note: This sample uses the all-objects Tensor Flow model (FreightFrenzy_BCDM.tflite), which contains
     * the following 4 detectable objects
     *  0: Ball,
     *  1: Cube,
     *  2: Duck,
     *  3: Marker (duck location tape marker)
     *
     *  Two additional model assets are available which only contain a subset of the objects:
     *  FreightFrenzy_BC.tflite  0: Ball,  1: Cube
     *  FreightFrenzy_DM.tflite  0: Duck,  1: Marker
     */
    private static final String TFOD_MODEL_ASSET = "FreightFrenzy_BCDM.tflite";
    private static final String[] LABELS = {
        "Ball",
        "Cube",
        "Duck",
        "Marker"
    };

    /*
     * IMPORTANT: You need to obtain your own license key to use Vuforia. The string below with which
     * 'parameters.vuforiaLicenseKey' is initialized is for illustration only, and will not function.
     * A Vuforia 'Development' license key can be obtained free of charge from the Vuforia developer
     * web site at https://developer.vuforia.com/license-manager.
     *
     * Vuforia license keys are always 380 characters long, and look as if they contain mostly
     * random data. As an example, here is a fragment of a valid key:
     *      ... yIgIzTqZ4mWjk9wd3cZO9T1axEqzuhxoGlfOOI2dRzKS4T0hQ8kT ...
     * Once you've obtained a license key, copy the string from the Vuforia web site
     * and paste it in to your code on the next line, between the double quotes.
     */
    private static final String VUFORIA_KEY =
            " -- YOUR NEW VUFORIA KEY GOES HERE --- ";
    private static final boolean USE_WEBCAM = true;  // true for webcam, false for phone camera

    /**
     * {@link #vuforia} is the variable we will use to store our instance of the Vuforia
     * localization engine.
     * {@link #tfod} is the variable to store our instance of the TensorFlow Object Detection processor.
     */
    private VuforiaLocalizer vuforia;
    private TfodProcessor tfod;

    /**
     * {@link #tfod} is the variable we will use to store our instance of the TensorFlow Object
     * Detection engine.
     * {@link #visionPortal} is the variable to store our instance of the vision portal.
     */
    private TFObjectDetector tfod;
    private VisionPortal visionPortal;

    @Override
    public void runOpMode() {
        // The TFObjectDetector uses the camera frames from the VuforiaLocalizer, so we create that
        // first.
        initVuforia();

        initTfod();

        /**
         * Activate TensorFlow Object Detection before we wait for the start command.
         * Do it here so that the Camera Stream window will have the TensorFlow annotations visible.
         **/
        if (tfod != null) {
            tfod.activate();

            // The TensorFlow software will scale the input images from the camera to a lower resolution.
            // This can result in lower detection accuracy at longer distances (> 55cm or 22").
            // If your target is at a distance greater than 50 cm (20") you can adjust the magnification value
            // to artificially zoom in to the center of the image. For best results, the "aspectRatio" argument
            // should be set to the value of the images used to create the TensorFlow Object Detection model
            // (typically 16/9).
            tfod.setZoom(2.5, 16.0/9.0);
        }

        /** Wait for the game to begin */
        telemetry.addData(">", "Press Play to start op mode");
        // Wait for the DS start button to be touched.
        telemetry.addData("DS preview on/off", "3 dots, Camera Stream");
        telemetry.addData(">", "Touch Play to start OpMode");
        telemetry.update();
        waitForStart();

        if (opModeIsActive()) {
            while (opModeIsActive()) {
                if (tfod != null) {
                    // getUpdatedRecognitions() will return null if no new information is available since
                    // the last time that call was made.
                    List<Recognition> updatedRecognitions = tfod.getUpdatedRecognitions();
                    if (updatedRecognitions != null) {
                        telemetry.addData("# Object Detected", updatedRecognitions.size());

                        // step through the list of recognitions and display boundary info.
                        int i = 0;
                        for (Recognition recognition : updatedRecognitions) {
                            telemetry.addData(String.format("label (%d)", i), recognition.getLabel());
                            telemetry.addData(String.format("  left,top (%d)", i), "%.03f , %.03f",
                                    recognition.getLeft(), recognition.getTop());
                            telemetry.addData(String.format("  right,bottom (%d)", i), "%.03f , %.03f",
                                    recognition.getRight(), recognition.getBottom());
                            i++;
                        }
                        telemetry.update();
                    }
                telemetryTfod();

                // Push telemetry to the Driver Station.
                telemetry.update();

                // Save CPU resources; can resume streaming when needed.
                if (gamepad1.dpad_down) {
                    visionPortal.stopStreaming();
                } else if (gamepad1.dpad_up) {
                    visionPortal.resumeStreaming();
                }

                // Share the CPU.
                sleep(20);
                }
            }
        }

        // Save more CPU resources when camera is no longer needed.
        visionPortal.close();

    }   // end runOpMode()

    /**
     * Initialize the Vuforia localization engine.
     */
    private void initVuforia() {
        /*
         * Configure Vuforia by creating a Parameter object, and passing it to the Vuforia engine.
         */
        VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters();

        parameters.vuforiaLicenseKey = VUFORIA_KEY;
        parameters.cameraDirection = CameraDirection.BACK;

        // Instantiate the Vuforia engine
        vuforia = ClassFactory.getInstance().createVuforia(parameters);

        // Loading trackables is not necessary for the TensorFlow Object Detection engine.
    }

    /**
     * Initialize the TensorFlow Object Detection engine.
     * Initialize the TensorFlow Object Detection processor.
     */
    private void initTfod() {
        int tfodMonitorViewId = hardwareMap.appContext.getResources().getIdentifier(
            "tfodMonitorViewId", "id", hardwareMap.appContext.getPackageName());
        TFObjectDetector.Parameters tfodParameters = new TFObjectDetector.Parameters(tfodMonitorViewId);
        tfodParameters.minResultConfidence = 0.8f;
        tfodParameters.isModelTensorFlow2 = true;
        tfodParameters.inputSize = 320;
        tfod = ClassFactory.getInstance().createTFObjectDetector(tfodParameters, vuforia);
        tfod.loadModelFromAsset(TFOD_MODEL_ASSET, LABELS);
    }
}

        // Create the TensorFlow processor by using a builder.
        tfod = new TfodProcessor.Builder()

            // Use setModelAssetName() if the TF Model is built in as an asset.
            // Use setModelFileName() if you have downloaded a custom team model to the Robot Controller.
            //.setModelAssetName(TFOD_MODEL_ASSET)
            //.setModelFileName(TFOD_MODEL_FILE)

            //.setModelLabels(LABELS)
            //.setIsModelTensorFlow2(true)
            //.setIsModelQuantized(true)
            //.setModelInputSize(300)
            //.setModelAspectRatio(16.0 / 9.0)

            .build();

        // Create the vision portal by using a builder.
        VisionPortal.Builder builder = new VisionPortal.Builder();

        // Set the camera (webcam vs. built-in RC phone camera).
        if (USE_WEBCAM) {
            builder.setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"));
        } else {
            builder.setCamera(BuiltinCameraDirection.BACK);
        }

        // Choose a camera resolution. Not all cameras support all resolutions.
        //builder.setCameraResolution(new Size(640, 480));

        // Enable the RC preview (LiveView). Set "false" to omit camera monitoring.
        //builder.enableCameraMonitoring(true);

        // Set the stream format; MJPEG uses less bandwidth than the default YUY2.
        //builder.setStreamFormat(VisionPortal.StreamFormat.YUY2);

        // Choose whether or not LiveView stops if no processors are enabled.
        // If set "true", monitor shows solid orange screen if no processors enabled.
        // If set "false", monitor shows camera view without annotations.
        //builder.setAutoStopLiveView(false);

        // Set and enable the processor.
        builder.addProcessor(tfod);

        // Build the Vision Portal, using the above settings.
        visionPortal = builder.build();

        // Set confidence threshold for TFOD recognitions, at any time.
        //tfod.setMinResultConfidence(0.75f);

        // Disable or re-enable the TFOD processor at any time.
        //visionPortal.setProcessorEnabled(tfod, true);

    }   // end method initTfod()

    /**
     * Function to add telemetry about TensorFlow Object Detection (TFOD) recognitions.
     */
    private void telemetryTfod() {

        List<Recognition> currentRecognitions = tfod.getRecognitions();
        telemetry.addData("# Objects Detected", currentRecognitions.size());

        // Step through the list of recognitions and display info for each one.
        for (Recognition recognition : currentRecognitions) {
            double x = (recognition.getLeft() + recognition.getRight()) / 2 ;
            double y = (recognition.getTop()  + recognition.getBottom()) / 2 ;

            telemetry.addData(""," ");
            telemetry.addData("Image", "%s (%.0f %% Conf.)", recognition.getLabel(), recognition.getConfidence() * 100);
            telemetry.addData("- Position", "%.0f / %.0f", x, y);
            telemetry.addData("- Size", "%.0f x %.0f", recognition.getWidth(), recognition.getHeight());
        }   // end for() loop

    }   // end method telemetryTfod()

}   // end class
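Beyond echoing recognitions to telemetry, an OpMode usually has to act on them. A minimal sketch of selecting the highest-confidence recognition, assuming the tfod processor field from the new code above (this helper is illustrative, not part of the sample):

    // A possible helper: pick the most confident recognition, if any.
    private Recognition bestRecognition() {
        Recognition best = null;
        for (Recognition r : tfod.getRecognitions()) {
            if (best == null || r.getConfidence() > best.getConfidence()) {
                best = r;
            }
        }
        return best;  // may be null when nothing is detected
    }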
@ -0,0 +1,141 @@
/* Copyright (c) 2019 FIRST. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted (subject to the limitations in the disclaimer below) provided that
 * the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this list
 * of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice, this
 * list of conditions and the following disclaimer in the documentation and/or
 * other materials provided with the distribution.
 *
 * Neither the name of FIRST nor the names of its contributors may be used to endorse or
 * promote products derived from this software without specific prior written permission.
 *
 * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
 * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

package org.firstinspires.ftc.robotcontroller.external.samples;

import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import java.util.List;
import org.firstinspires.ftc.robotcore.external.hardware.camera.BuiltinCameraDirection;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.robotcore.external.tfod.Recognition;
import org.firstinspires.ftc.vision.VisionPortal;
import org.firstinspires.ftc.vision.tfod.TfodProcessor;

/**
 * This 2023-2024 OpMode illustrates the basics of TensorFlow Object Detection, using
 * the easiest approach.
 *
 * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
 * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
 */
@TeleOp(name = "Concept: TensorFlow Object Detection Easy", group = "Concept")
@Disabled
public class ConceptTensorFlowObjectDetectionEasy extends LinearOpMode {

    private static final boolean USE_WEBCAM = true;  // true for webcam, false for phone camera

    /**
     * {@link #tfod} is the variable to store our instance of the TensorFlow Object Detection processor.
     */
    private TfodProcessor tfod;

    /**
     * {@link #visionPortal} is the variable to store our instance of the vision portal.
     */
    private VisionPortal visionPortal;

    @Override
    public void runOpMode() {

        initTfod();

        // Wait for the DS start button to be touched.
        telemetry.addData("DS preview on/off", "3 dots, Camera Stream");
        telemetry.addData(">", "Touch Play to start OpMode");
        telemetry.update();
        waitForStart();

        if (opModeIsActive()) {
            while (opModeIsActive()) {

                telemetryTfod();

                // Push telemetry to the Driver Station.
                telemetry.update();

                // Save CPU resources; can resume streaming when needed.
                if (gamepad1.dpad_down) {
                    visionPortal.stopStreaming();
                } else if (gamepad1.dpad_up) {
                    visionPortal.resumeStreaming();
                }

                // Share the CPU.
                sleep(20);
            }
        }

        // Save more CPU resources when camera is no longer needed.
        visionPortal.close();

    }   // end runOpMode()

    /**
     * Initialize the TensorFlow Object Detection processor.
     */
    private void initTfod() {

        // Create the TensorFlow processor the easy way.
        tfod = TfodProcessor.easyCreateWithDefaults();

        // Create the vision portal the easy way.
        if (USE_WEBCAM) {
            visionPortal = VisionPortal.easyCreateWithDefaults(
                hardwareMap.get(WebcamName.class, "Webcam 1"), tfod);
        } else {
            visionPortal = VisionPortal.easyCreateWithDefaults(
                BuiltinCameraDirection.BACK, tfod);
        }

    }   // end method initTfod()

    /**
     * Function to add telemetry about TensorFlow Object Detection (TFOD) recognitions.
     */
    private void telemetryTfod() {

        List<Recognition> currentRecognitions = tfod.getRecognitions();
        telemetry.addData("# Objects Detected", currentRecognitions.size());

        // Step through the list of recognitions and display info for each one.
        for (Recognition recognition : currentRecognitions) {
            double x = (recognition.getLeft() + recognition.getRight()) / 2 ;
            double y = (recognition.getTop()  + recognition.getBottom()) / 2 ;

            telemetry.addData(""," ");
            telemetry.addData("Image", "%s (%.0f %% Conf.)", recognition.getLabel(), recognition.getConfidence() * 100);
            telemetry.addData("- Position", "%.0f / %.0f", x, y);
            telemetry.addData("- Size", "%.0f x %.0f", recognition.getWidth(), recognition.getHeight());
        }   // end for() loop

    }   // end method telemetryTfod()

}   // end class
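Worth noting: the easyCreateWithDefaults() portal factory takes a variable number of processors, so one portal can feed several. A hedged sketch (attaching an AprilTagProcessor alongside TFOD here is an assumption for illustration, not part of this sample):

import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.vision.VisionPortal;
import org.firstinspires.ftc.vision.apriltag.AprilTagProcessor;
import org.firstinspires.ftc.vision.tfod.TfodProcessor;

// Inside initTfod(), both processors sharing one camera and one portal.
TfodProcessor tfod = TfodProcessor.easyCreateWithDefaults();
AprilTagProcessor aprilTag = AprilTagProcessor.easyCreateWithDefaults();
VisionPortal portal = VisionPortal.easyCreateWithDefaults(
        hardwareMap.get(WebcamName.class, "Webcam 1"), tfod, aprilTag);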
@ -34,188 +34,152 @@ import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import java.util.List;
import org.firstinspires.ftc.robotcore.external.ClassFactory;
import org.firstinspires.ftc.robotcore.external.hardware.camera.SwitchableCamera;
import org.firstinspires.ftc.robotcore.external.hardware.camera.CameraName;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer;
import org.firstinspires.ftc.robotcore.external.tfod.TFObjectDetector;
import org.firstinspires.ftc.robotcore.external.tfod.Recognition;
import org.firstinspires.ftc.vision.VisionPortal;
import org.firstinspires.ftc.vision.VisionPortal.CameraState;
import org.firstinspires.ftc.vision.tfod.TfodProcessor;

/**
 * This 2020-2021 OpMode illustrates the basics of using the TensorFlow Object Detection API to
 * determine the position of the Freight Frenzy game elements.
 * This 2023-2024 OpMode illustrates the basics of TensorFlow Object Detection, using
 * two webcams.
 *
 * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
 * Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list.
 *
 * IMPORTANT: In order to use this OpMode, you need to obtain your own Vuforia license key as
 * is explained below.
 * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
 */
@TeleOp(name = "Concept: TensorFlow Object Detection Switchable Cameras", group = "Concept")
@Disabled
public class ConceptTensorFlowObjectDetectionSwitchableCameras extends LinearOpMode {
    /* Note: This sample uses the all-objects Tensor Flow model (FreightFrenzy_BCDM.tflite), which contains
     * the following 4 detectable objects
     *  0: Ball,
     *  1: Cube,
     *  2: Duck,
     *  3: Marker (duck location tape marker)
     *
     *  Two additional model assets are available which only contain a subset of the objects:
     *  FreightFrenzy_BC.tflite  0: Ball,  1: Cube
     *  FreightFrenzy_DM.tflite  0: Duck,  1: Marker
     */
    private static final String TFOD_MODEL_ASSET = "FreightFrenzy_BCDM.tflite";
    private static final String[] LABELS = {
        "Ball",
        "Cube",
        "Duck",
        "Marker"
    };

    /*
     * IMPORTANT: You need to obtain your own license key to use Vuforia. The string below with which
     * 'parameters.vuforiaLicenseKey' is initialized is for illustration only, and will not function.
     * A Vuforia 'Development' license key can be obtained free of charge from the Vuforia developer
     * web site at https://developer.vuforia.com/license-manager.
     *
     * Vuforia license keys are always 380 characters long, and look as if they contain mostly
     * random data. As an example, here is a fragment of a valid key:
     *      ... yIgIzTqZ4mWjk9wd3cZO9T1axEqzuhxoGlfOOI2dRzKS4T0hQ8kT ...
     * Once you've obtained a license key, copy the string from the Vuforia web site
     * and paste it in to your code on the next line, between the double quotes.
     */
    private static final String VUFORIA_KEY =
            " -- YOUR NEW VUFORIA KEY GOES HERE --- ";

    /**
     * {@link #vuforia} is the variable we will use to store our instance of the Vuforia
     * localization engine.
     */
    private VuforiaLocalizer vuforia;

    /**
     * Variables used for switching cameras.
     */
    private WebcamName webcam1, webcam2;
    private SwitchableCamera switchableCamera;
    private boolean oldLeftBumper;
    private boolean oldRightBumper;

    /**
     * {@link #tfod} is the variable we will use to store our instance of the TensorFlow Object
     * Detection engine.
     * {@link #tfod} is the variable to store our instance of the TensorFlow Object Detection processor.
     */
    private TFObjectDetector tfod;
    private TfodProcessor tfod;

    /**
     * {@link #visionPortal} is the variable to store our instance of the vision portal.
     */
    private VisionPortal visionPortal;

    @Override
    public void runOpMode() {
        // The TFObjectDetector uses the camera frames from the VuforiaLocalizer, so we create that
        // first.
        initVuforia();

        initTfod();

        /**
         * Activate TensorFlow Object Detection before we wait for the start command.
         * Do it here so that the Camera Stream window will have the TensorFlow annotations visible.
         **/
        if (tfod != null) {
            tfod.activate();

            // The TensorFlow software will scale the input images from the camera to a lower resolution.
            // This can result in lower detection accuracy at longer distances (> 55cm or 22").
            // If your target is at a distance greater than 50 cm (20") you can adjust the magnification value
            // to artificially zoom in to the center of the image. For best results, the "aspectRatio" argument
            // should be set to the value of the images used to create the TensorFlow Object Detection model
            // (typically 16/9).
            tfod.setZoom(2.5, 16.0/9.0);
        }

        /** Wait for the game to begin */
        telemetry.addData(">", "Press Play to start op mode");
        // Wait for the DS start button to be touched.
        telemetry.addData("DS preview on/off", "3 dots, Camera Stream");
        telemetry.addData(">", "Touch Play to start OpMode");
        telemetry.update();
        waitForStart();

        if (opModeIsActive()) {
            while (opModeIsActive()) {
                if (tfod != null) {
                    doCameraSwitching();
                    List<Recognition> recognitions = tfod.getRecognitions();
                    telemetry.addData("# Object Detected", recognitions.size());
                    // step through the list of recognitions and display boundary info.
                    int i = 0;
                    for (Recognition recognition : recognitions) {
                        telemetry.addData(String.format("label (%d)", i), recognition.getLabel());
                        telemetry.addData(String.format("  left,top (%d)", i), "%.03f , %.03f",
                                recognition.getLeft(), recognition.getTop());
                        telemetry.addData(String.format("  right,bottom (%d)", i), "%.03f , %.03f",
                                recognition.getRight(), recognition.getBottom());
                        i++;
                    }
                    telemetry.update();

                telemetryCameraSwitching();
                telemetryTfod();

                // Push telemetry to the Driver Station.
                telemetry.update();

                // Save CPU resources; can resume streaming when needed.
                if (gamepad1.dpad_down) {
                    visionPortal.stopStreaming();
                } else if (gamepad1.dpad_up) {
                    visionPortal.resumeStreaming();
                }

                doCameraSwitching();

                // Share the CPU.
                sleep(20);
                }
            }
        }

        // Save more CPU resources when camera is no longer needed.
        visionPortal.close();

    }   // end runOpMode()

    /**
     * Initialize the Vuforia localization engine.
     */
    private void initVuforia() {
        /*
         * Configure Vuforia by creating a Parameter object, and passing it to the Vuforia engine.
         */
        VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters();

        parameters.vuforiaLicenseKey = VUFORIA_KEY;

        // Indicate that we wish to be able to switch cameras.
        webcam1 = hardwareMap.get(WebcamName.class, "Webcam 1");
        webcam2 = hardwareMap.get(WebcamName.class, "Webcam 2");
        parameters.cameraName = ClassFactory.getInstance().getCameraManager().nameForSwitchableCamera(webcam1, webcam2);

        // Instantiate the Vuforia engine
        vuforia = ClassFactory.getInstance().createVuforia(parameters);

        // Set the active camera to Webcam 1.
        switchableCamera = (SwitchableCamera) vuforia.getCamera();
        switchableCamera.setActiveCamera(webcam1);

        // Loading trackables is not necessary for the TensorFlow Object Detection engine.
    }

    /**
     * Initialize the TensorFlow Object Detection engine.
     * Initialize the TensorFlow Object Detection processor.
     */
    private void initTfod() {
        int tfodMonitorViewId = hardwareMap.appContext.getResources().getIdentifier(
            "tfodMonitorViewId", "id", hardwareMap.appContext.getPackageName());
        TFObjectDetector.Parameters tfodParameters = new TFObjectDetector.Parameters(tfodMonitorViewId);
        tfodParameters.minResultConfidence = 0.8f;
        tfodParameters.isModelTensorFlow2 = true;
        tfodParameters.inputSize = 320;
        tfod = ClassFactory.getInstance().createTFObjectDetector(tfodParameters, vuforia);
        tfod.loadModelFromAsset(TFOD_MODEL_ASSET, LABELS);
    }

    private void doCameraSwitching() {
        // If the left bumper is pressed, use Webcam 1.
        // If the right bumper is pressed, use Webcam 2.
        boolean newLeftBumper = gamepad1.left_bumper;
        boolean newRightBumper = gamepad1.right_bumper;
        if (newLeftBumper && !oldLeftBumper) {
            switchableCamera.setActiveCamera(webcam1);
        } else if (newRightBumper && !oldRightBumper) {
            switchableCamera.setActiveCamera(webcam2);
        }
        oldLeftBumper = newLeftBumper;
        oldRightBumper = newRightBumper;
        // Create the TensorFlow processor by using a builder.
        tfod = new TfodProcessor.Builder().build();

        if (switchableCamera.getActiveCamera().equals(webcam1)) {
        webcam1 = hardwareMap.get(WebcamName.class, "Webcam 1");
        webcam2 = hardwareMap.get(WebcamName.class, "Webcam 2");
        CameraName switchableCamera = ClassFactory.getInstance()
            .getCameraManager().nameForSwitchableCamera(webcam1, webcam2);

        // Create the vision portal by using a builder.
        visionPortal = new VisionPortal.Builder()
            .setCamera(switchableCamera)
            .addProcessor(tfod)
            .build();

    }   // end method initTfod()

    /**
     * Function to add telemetry about camera switching.
     */
    private void telemetryCameraSwitching() {
        if (visionPortal.getActiveCamera().equals(webcam1)) {
            telemetry.addData("activeCamera", "Webcam 1");
            telemetry.addData("Press RightBumper", "to switch to Webcam 2");
        } else {
            telemetry.addData("activeCamera", "Webcam 2");
            telemetry.addData("Press LeftBumper", "to switch to Webcam 1");
        }
    }
}
    }   // end method telemetryCameraSwitching()

    /**
     * Function to add telemetry about TensorFlow Object Detection (TFOD) recognitions.
     */
    private void telemetryTfod() {

        List<Recognition> currentRecognitions = tfod.getRecognitions();
        telemetry.addData("# Objects Detected", currentRecognitions.size());

        // Step through the list of recognitions and display info for each one.
        for (Recognition recognition : currentRecognitions) {
            double x = (recognition.getLeft() + recognition.getRight()) / 2 ;
            double y = (recognition.getTop()  + recognition.getBottom()) / 2 ;

            telemetry.addData(""," ");
            telemetry.addData("Image", "%s (%.0f %% Conf.)", recognition.getLabel(), recognition.getConfidence() * 100);
            telemetry.addData("- Position", "%.0f / %.0f", x, y);
            telemetry.addData("- Size", "%.0f x %.0f", recognition.getWidth(), recognition.getHeight());
        }   // end for() loop

    }   // end method telemetryTfod()

    /**
     * Function to set the active camera according to input from the gamepad.
     */
    private void doCameraSwitching() {
        if (visionPortal.getCameraState() == CameraState.STREAMING) {
            // If the left bumper is pressed, use Webcam 1.
            // If the right bumper is pressed, use Webcam 2.
            boolean newLeftBumper = gamepad1.left_bumper;
            boolean newRightBumper = gamepad1.right_bumper;
            if (newLeftBumper && !oldLeftBumper) {
                visionPortal.setActiveCamera(webcam1);
            } else if (newRightBumper && !oldRightBumper) {
                visionPortal.setActiveCamera(webcam2);
            }
            oldLeftBumper = newLeftBumper;
            oldRightBumper = newRightBumper;
        }
    }   // end method doCameraSwitching()

}   // end class
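The bumper handling in doCameraSwitching() above is rising-edge detection: it acts only when a button goes from released to pressed, so holding the bumper does not re-trigger the switch every loop. That pattern generalizes to any gamepad button; a hedged sketch of a reusable helper (this class is an assumption for illustration, not part of the SDK):

// Tracks one button's previous state and reports only the released-to-pressed transition.
class ButtonEdge {
    private boolean previous = false;

    // Returns true exactly once per press.
    boolean risingEdge(boolean pressed) {
        boolean result = pressed && !previous;
        previous = pressed;
        return result;
    }
}

// Usage inside the loop, with one ButtonEdge per button:
// if (leftEdge.risingEdge(gamepad1.left_bumper)) { visionPortal.setActiveCamera(webcam1); }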
@ -1,185 +0,0 @@
/* Copyright (c) 2019 FIRST. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted (subject to the limitations in the disclaimer below) provided that
 * the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this list
 * of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice, this
 * list of conditions and the following disclaimer in the documentation and/or
 * other materials provided with the distribution.
 *
 * Neither the name of FIRST nor the names of its contributors may be used to endorse or
 * promote products derived from this software without specific prior written permission.
 *
 * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
 * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

package org.firstinspires.ftc.robotcontroller.external.samples;

import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import java.util.List;
import org.firstinspires.ftc.robotcore.external.ClassFactory;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer;
import org.firstinspires.ftc.robotcore.external.tfod.TFObjectDetector;
import org.firstinspires.ftc.robotcore.external.tfod.Recognition;

/**
 * This 2020-2021 OpMode illustrates the basics of using the TensorFlow Object Detection API to
 * determine the position of the Freight Frenzy game elements.
 *
 * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
 * Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list.
 *
 * IMPORTANT: In order to use this OpMode, you need to obtain your own Vuforia license key as
 * is explained below.
 */
@TeleOp(name = "Concept: TensorFlow Object Detection Webcam", group = "Concept")
@Disabled
public class ConceptTensorFlowObjectDetectionWebcam extends LinearOpMode {
    /* Note: This sample uses the all-objects Tensor Flow model (FreightFrenzy_BCDM.tflite), which contains
     * the following 4 detectable objects
     *  0: Ball,
     *  1: Cube,
     *  2: Duck,
     *  3: Marker (duck location tape marker)
     *
     *  Two additional model assets are available which only contain a subset of the objects:
     *  FreightFrenzy_BC.tflite  0: Ball,  1: Cube
     *  FreightFrenzy_DM.tflite  0: Duck,  1: Marker
     */
    private static final String TFOD_MODEL_ASSET = "FreightFrenzy_BCDM.tflite";
    private static final String[] LABELS = {
        "Ball",
        "Cube",
        "Duck",
        "Marker"
    };

    /*
     * IMPORTANT: You need to obtain your own license key to use Vuforia. The string below with which
     * 'parameters.vuforiaLicenseKey' is initialized is for illustration only, and will not function.
     * A Vuforia 'Development' license key can be obtained free of charge from the Vuforia developer
     * web site at https://developer.vuforia.com/license-manager.
     *
     * Vuforia license keys are always 380 characters long, and look as if they contain mostly
     * random data. As an example, here is a fragment of a valid key:
     *      ... yIgIzTqZ4mWjk9wd3cZO9T1axEqzuhxoGlfOOI2dRzKS4T0hQ8kT ...
     * Once you've obtained a license key, copy the string from the Vuforia web site
     * and paste it in to your code on the next line, between the double quotes.
     */
    private static final String VUFORIA_KEY =
            " -- YOUR NEW VUFORIA KEY GOES HERE --- ";

    /**
     * {@link #vuforia} is the variable we will use to store our instance of the Vuforia
     * localization engine.
     */
    private VuforiaLocalizer vuforia;

    /**
     * {@link #tfod} is the variable we will use to store our instance of the TensorFlow Object
     * Detection engine.
     */
    private TFObjectDetector tfod;

    @Override
    public void runOpMode() {
        // The TFObjectDetector uses the camera frames from the VuforiaLocalizer, so we create that
        // first.
        initVuforia();
        initTfod();

        /**
         * Activate TensorFlow Object Detection before we wait for the start command.
         * Do it here so that the Camera Stream window will have the TensorFlow annotations visible.
         **/
        if (tfod != null) {
            tfod.activate();

            // The TensorFlow software will scale the input images from the camera to a lower resolution.
            // This can result in lower detection accuracy at longer distances (> 55cm or 22").
            // If your target is at a distance greater than 50 cm (20") you can adjust the magnification value
            // to artificially zoom in to the center of the image. For best results, the "aspectRatio" argument
            // should be set to the value of the images used to create the TensorFlow Object Detection model
            // (typically 16/9).
            tfod.setZoom(2.5, 16.0/9.0);
        }

        /** Wait for the game to begin */
        telemetry.addData(">", "Press Play to start op mode");
        telemetry.update();
        waitForStart();

        if (opModeIsActive()) {
            while (opModeIsActive()) {
                if (tfod != null) {
                    // getUpdatedRecognitions() will return null if no new information is available since
                    // the last time that call was made.
                    List<Recognition> updatedRecognitions = tfod.getUpdatedRecognitions();
                    if (updatedRecognitions != null) {
                        telemetry.addData("# Object Detected", updatedRecognitions.size());
                        // step through the list of recognitions and display boundary info.
                        int i = 0;
                        for (Recognition recognition : updatedRecognitions) {
                            telemetry.addData(String.format("label (%d)", i), recognition.getLabel());
                            telemetry.addData(String.format("  left,top (%d)", i), "%.03f , %.03f",
                                    recognition.getLeft(), recognition.getTop());
                            telemetry.addData(String.format("  right,bottom (%d)", i), "%.03f , %.03f",
                                    recognition.getRight(), recognition.getBottom());
                            i++;
                        }
                        telemetry.update();
                    }
                }
            }
        }
    }

    /**
     * Initialize the Vuforia localization engine.
     */
    private void initVuforia() {
        /*
         * Configure Vuforia by creating a Parameter object, and passing it to the Vuforia engine.
         */
        VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters();

        parameters.vuforiaLicenseKey = VUFORIA_KEY;
        parameters.cameraName = hardwareMap.get(WebcamName.class, "Webcam 1");

        // Instantiate the Vuforia engine
        vuforia = ClassFactory.getInstance().createVuforia(parameters);

        // Loading trackables is not necessary for the TensorFlow Object Detection engine.
    }

    /**
     * Initialize the TensorFlow Object Detection engine.
     */
    private void initTfod() {
        int tfodMonitorViewId = hardwareMap.appContext.getResources().getIdentifier(
            "tfodMonitorViewId", "id", hardwareMap.appContext.getPackageName());
        TFObjectDetector.Parameters tfodParameters = new TFObjectDetector.Parameters(tfodMonitorViewId);
        tfodParameters.minResultConfidence = 0.8f;
        tfodParameters.isModelTensorFlow2 = true;
        tfodParameters.inputSize = 320;
        tfod = ClassFactory.getInstance().createTFObjectDetector(tfodParameters, vuforia);
        tfod.loadModelFromAsset(TFOD_MODEL_ASSET, LABELS);
    }
}
@ -1,186 +0,0 @@
|
||||
/* Copyright (c) 2017 FIRST. All rights reserved.
|
||||
*
|
||||
* Redistribution and use in source and binary forms, with or without modification,
|
||||
* are permitted (subject to the limitations in the disclaimer below) provided that
|
||||
* the following conditions are met:
|
||||
*
|
||||
* Redistributions of source code must retain the above copyright notice, this list
|
||||
* of conditions and the following disclaimer.
|
||||
*
|
||||
* Redistributions in binary form must reproduce the above copyright notice, this
|
||||
* list of conditions and the following disclaimer in the documentation and/or
|
||||
* other materials provided with the distribution.
|
||||
*
|
||||
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
|
||||
* promote products derived from this software without specific prior written permission.
|
||||
*
|
||||
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
|
||||
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
|
||||
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
|
||||
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package org.firstinspires.ftc.robotcontroller.external.samples;

import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;

import org.firstinspires.ftc.robotcore.external.ClassFactory;
import org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix;
import org.firstinspires.ftc.robotcore.external.matrices.VectorF;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.robotcore.external.navigation.AxesOrder;
import org.firstinspires.ftc.robotcore.external.navigation.AxesReference;
import org.firstinspires.ftc.robotcore.external.navigation.Orientation;
import org.firstinspires.ftc.robotcore.external.navigation.RelicRecoveryVuMark;
import org.firstinspires.ftc.robotcore.external.navigation.VuMarkInstanceId;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackable;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackableDefaultListener;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackables;

/**
 * This OpMode illustrates the basics of using the Vuforia engine to determine
 * the identity of Vuforia VuMarks encountered on the field. The code is structured as
 * a LinearOpMode. It shares much structure with {@link ConceptVuforiaNavigation}; we do not
 * duplicate the core Vuforia documentation found there, but instead focus on the
 * differences between the use of Vuforia for navigation vs VuMark identification.
 *
 * @see ConceptVuforiaNavigation
 * @see VuforiaLocalizer
 * @see VuforiaTrackableDefaultListener
 * see ftc_app/doc/tutorial/FTC_FieldCoordinateSystemDefinition.pdf
 *
 * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
 * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
 *
 * IMPORTANT: In order to use this OpMode, you need to obtain your own Vuforia license key as
 * is explained in {@link ConceptVuforiaNavigation}.
 */

@TeleOp(name="Concept: VuMark Id", group ="Concept")
@Disabled
public class ConceptVuMarkIdentification extends LinearOpMode {

    public static final String TAG = "Vuforia VuMark Sample";

    OpenGLMatrix lastLocation = null;

    /**
     * {@link #vuforia} is the variable we will use to store our instance of the Vuforia
     * localization engine.
     */
    VuforiaLocalizer vuforia;

    @Override public void runOpMode() {

        /*
         * To start up Vuforia, tell it the view that we wish to use for camera monitor (on the RC phone).
         */
        int cameraMonitorViewId = hardwareMap.appContext.getResources().getIdentifier("cameraMonitorViewId", "id", hardwareMap.appContext.getPackageName());
        VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(cameraMonitorViewId);

        // OR... Do Not Activate the Camera Monitor View, to save power
        // VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters();

        /*
         * IMPORTANT: You need to obtain your own license key to use Vuforia. The string below with which
         * 'parameters.vuforiaLicenseKey' is initialized is for illustration only, and will not function.
         * A Vuforia 'Development' license key can be obtained free of charge from the Vuforia developer
         * web site at https://developer.vuforia.com/license-manager.
         *
         * Vuforia license keys are always 380 characters long, and look as if they contain mostly
         * random data. As an example, here is a fragment of a valid key:
         *      ... yIgIzTqZ4mWjk9wd3cZO9T1axEqzuhxoGlfOOI2dRzKS4T0hQ8kT ...
         * Once you've obtained a license key, copy the string from the Vuforia web site
         * and paste it in to your code on the next line, between the double quotes.
         */
        parameters.vuforiaLicenseKey = " -- YOUR NEW VUFORIA KEY GOES HERE --- ";

        /*
         * We also indicate which camera on the RC we wish to use.
         * Here we chose the back (HiRes) camera (for greater range), but
         * for a competition robot, the front camera might be more convenient.
         */
        parameters.cameraDirection = VuforiaLocalizer.CameraDirection.BACK;

        /*
         * Instantiate the Vuforia engine
         */
        vuforia = ClassFactory.getInstance().createVuforia(parameters);

        /*
         * Load the data set containing the VuMarks for Relic Recovery. There's only one trackable
         * in this data set: all three of the VuMarks in the game were created from this one template,
         * but differ in their instance id information.
         * @see VuMarkInstanceId
         */
        VuforiaTrackables relicTrackables = this.vuforia.loadTrackablesFromAsset("RelicVuMark");
        VuforiaTrackable relicTemplate = relicTrackables.get(0);
        relicTemplate.setName("relicVuMarkTemplate"); // can help in debugging; otherwise not necessary

        telemetry.addData(">", "Press Play to start");
        telemetry.update();
        waitForStart();

        relicTrackables.activate();

        while (opModeIsActive()) {

            /*
             * See if any of the instances of {@link relicTemplate} are currently visible.
             * {@link RelicRecoveryVuMark} is an enum which can have the following values:
             * UNKNOWN, LEFT, CENTER, and RIGHT. When a VuMark is visible, something other than
             * UNKNOWN will be returned by {@link RelicRecoveryVuMark#from(VuforiaTrackable)}.
             */
            RelicRecoveryVuMark vuMark = RelicRecoveryVuMark.from(relicTemplate);
            if (vuMark != RelicRecoveryVuMark.UNKNOWN) {

                /* Found an instance of the template. In the actual game, you will probably
                 * loop until this condition occurs, then move on to act accordingly depending
                 * on which VuMark was visible. */
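                // As a sketch of how a team might act on the identity (illustrative only;
                // the helpers below are hypothetical and not part of this sample):
                //     switch (vuMark) {
                //         case LEFT:   driveToLeftColumn();   break;  // hypothetical helper
                //         case CENTER: driveToCenterColumn(); break;  // hypothetical helper
                //         case RIGHT:  driveToRightColumn();  break;  // hypothetical helper
                //     }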
telemetry.addData("VuMark", "%s visible", vuMark);
|
||||
|
||||
/* For fun, we also exhibit the navigational pose. In the Relic Recovery game,
|
||||
* it is perhaps unlikely that you will actually need to act on this pose information, but
|
||||
* we illustrate it nevertheless, for completeness. */
|
||||
OpenGLMatrix pose = ((VuforiaTrackableDefaultListener)relicTemplate.getListener()).getPose();
|
||||
telemetry.addData("Pose", format(pose));
|
||||
|
||||
/* We further illustrate how to decompose the pose into useful rotational and
|
||||
* translational components */
|
||||
if (pose != null) {
|
||||
VectorF trans = pose.getTranslation();
|
||||
Orientation rot = Orientation.getOrientation(pose, AxesReference.EXTRINSIC, AxesOrder.XYZ, AngleUnit.DEGREES);
|
||||
|
||||
// Extract the X, Y, and Z components of the offset of the target relative to the robot
|
||||
double tX = trans.get(0);
|
||||
double tY = trans.get(1);
|
||||
double tZ = trans.get(2);
|
||||
|
||||
// Extract the rotational components of the target relative to the robot
|
||||
double rX = rot.firstAngle;
|
||||
double rY = rot.secondAngle;
|
||||
double rZ = rot.thirdAngle;
|
||||
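                    // Added note (illustrative): tX/tY/tZ are the VuMark's offset from the camera,
                    // in millimeters (Vuforia's working units here), and rX/rY/rZ are extrinsic XYZ
                    // rotation angles in degrees; a robot could, for example, servo on tZ to approach the mark.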
                }
            }
            else {
                telemetry.addData("VuMark", "not visible");
            }

            telemetry.update();
        }
    }

    String format(OpenGLMatrix transformationMatrix) {
        return (transformationMatrix != null) ? transformationMatrix.formatAsTransform() : "null";
    }
}
@ -1,194 +0,0 @@
/* Copyright (c) 2017 FIRST. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted (subject to the limitations in the disclaimer below) provided that
 * the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this list
 * of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice, this
 * list of conditions and the following disclaimer in the documentation and/or
 * other materials provided with the distribution.
 *
 * Neither the name of FIRST nor the names of its contributors may be used to endorse or
 * promote products derived from this software without specific prior written permission.
 *
 * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
 * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
package org.firstinspires.ftc.robotcontroller.external.samples;

import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;

import org.firstinspires.ftc.robotcore.external.ClassFactory;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix;
import org.firstinspires.ftc.robotcore.external.matrices.VectorF;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.robotcore.external.navigation.AxesOrder;
import org.firstinspires.ftc.robotcore.external.navigation.AxesReference;
import org.firstinspires.ftc.robotcore.external.navigation.Orientation;
import org.firstinspires.ftc.robotcore.external.navigation.RelicRecoveryVuMark;
import org.firstinspires.ftc.robotcore.external.navigation.VuMarkInstanceId;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackable;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackableDefaultListener;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackables;

/**
 * This OpMode illustrates the basics of using the Vuforia engine to determine
 * the identity of Vuforia VuMarks encountered on the field. The code is structured as
 * a LinearOpMode. It shares much structure with {@link ConceptVuforiaNavigationWebcam}; we do not
 * duplicate the core Vuforia documentation found there, but instead focus on the
 * differences between the use of Vuforia for navigation vs VuMark identification.
 *
 * @see ConceptVuforiaNavigationWebcam
 * @see VuforiaLocalizer
 * @see VuforiaTrackableDefaultListener
 * see ftc_app/doc/tutorial/FTC_FieldCoordinateSystemDefinition.pdf
 *
 * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
 * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
 *
 * IMPORTANT: In order to use this OpMode, you need to obtain your own Vuforia license key as
 * is explained in {@link ConceptVuforiaNavigationWebcam}.
 */

@TeleOp(name="Concept: VuMark Id Webcam", group ="Concept")
@Disabled
public class ConceptVuMarkIdentificationWebcam extends LinearOpMode {

    public static final String TAG = "Vuforia VuMark Sample";

    OpenGLMatrix lastLocation = null;

    /**
     * {@link #vuforia} is the variable we will use to store our instance of the Vuforia
     * localization engine.
     */
    VuforiaLocalizer vuforia;

    /**
     * This is the webcam we are to use. As with other hardware devices such as motors and
     * servos, this device is identified using the robot configuration tool in the FTC application.
     */
    WebcamName webcamName;

    @Override public void runOpMode() {

        /*
         * Retrieve the camera we are to use.
         */
        webcamName = hardwareMap.get(WebcamName.class, "Webcam 1");

        /*
         * To start up Vuforia, tell it the view that we wish to use for camera monitor (on the RC phone);
         * If no camera monitor is desired, use the parameterless constructor instead (commented out below).
         */
        int cameraMonitorViewId = hardwareMap.appContext.getResources().getIdentifier("cameraMonitorViewId", "id", hardwareMap.appContext.getPackageName());
        VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(cameraMonitorViewId);

        // OR... Do Not Activate the Camera Monitor View, to save power
        // VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters();

        /*
         * IMPORTANT: You need to obtain your own license key to use Vuforia. The string below with which
         * 'parameters.vuforiaLicenseKey' is initialized is for illustration only, and will not function.
         * A Vuforia 'Development' license key can be obtained free of charge from the Vuforia developer
         * web site at https://developer.vuforia.com/license-manager.
         *
         * Vuforia license keys are always 380 characters long, and look as if they contain mostly
         * random data. As an example, here is a fragment of a valid key:
         *      ... yIgIzTqZ4mWjk9wd3cZO9T1axEqzuhxoGlfOOI2dRzKS4T0hQ8kT ...
         * Once you've obtained a license key, copy the string from the Vuforia web site
         * and paste it in to your code on the next line, between the double quotes.
         */
        parameters.vuforiaLicenseKey = " -- YOUR NEW VUFORIA KEY GOES HERE --- ";

        /*
         * We also indicate which camera on the RC we wish to use. For pedagogical purposes,
         * we use the same logic as in {@link ConceptVuforiaNavigationWebcam}.
         */
        parameters.cameraName = webcamName;
        this.vuforia = ClassFactory.getInstance().createVuforia(parameters);

        /*
         * Load the data set containing the VuMarks for Relic Recovery. There's only one trackable
         * in this data set: all three of the VuMarks in the game were created from this one template,
         * but differ in their instance id information.
         * @see VuMarkInstanceId
         */
        VuforiaTrackables relicTrackables = this.vuforia.loadTrackablesFromAsset("RelicVuMark");
        VuforiaTrackable relicTemplate = relicTrackables.get(0);
        relicTemplate.setName("relicVuMarkTemplate"); // can help in debugging; otherwise not necessary

        telemetry.addData(">", "Press Play to start");
        telemetry.update();
        waitForStart();

        relicTrackables.activate();

        while (opModeIsActive()) {

            /*
             * See if any of the instances of {@link relicTemplate} are currently visible.
             * {@link RelicRecoveryVuMark} is an enum which can have the following values:
             * UNKNOWN, LEFT, CENTER, and RIGHT. When a VuMark is visible, something other than
             * UNKNOWN will be returned by {@link RelicRecoveryVuMark#from(VuforiaTrackable)}.
             */
            RelicRecoveryVuMark vuMark = RelicRecoveryVuMark.from(relicTemplate);
            if (vuMark != RelicRecoveryVuMark.UNKNOWN) {

                /* Found an instance of the template. In the actual game, you will probably
                 * loop until this condition occurs, then move on to act accordingly depending
                 * on which VuMark was visible. */
                telemetry.addData("VuMark", "%s visible", vuMark);

                /* For fun, we also exhibit the navigational pose. In the Relic Recovery game,
                 * it is perhaps unlikely that you will actually need to act on this pose information, but
                 * we illustrate it nevertheless, for completeness. */
                OpenGLMatrix pose = ((VuforiaTrackableDefaultListener)relicTemplate.getListener()).getFtcCameraFromTarget();
                telemetry.addData("Pose", format(pose));

                /* We further illustrate how to decompose the pose into useful rotational and
                 * translational components */
                if (pose != null) {
                    VectorF trans = pose.getTranslation();
                    Orientation rot = Orientation.getOrientation(pose, AxesReference.EXTRINSIC, AxesOrder.XYZ, AngleUnit.DEGREES);

                    // Extract the X, Y, and Z components of the offset of the target relative to the robot
                    double tX = trans.get(0);
                    double tY = trans.get(1);
                    double tZ = trans.get(2);

                    // Extract the rotational components of the target relative to the robot
                    double rX = rot.firstAngle;
                    double rY = rot.secondAngle;
                    double rZ = rot.thirdAngle;
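                    // Example use (illustrative, not in the original sample): the decomposed
                    // components could be sent to telemetry for inspection, e.g.
                    //     telemetry.addData("Offset", "{X, Y, Z} = %.0f, %.0f, %.0f", tX, tY, tZ);
                    //     telemetry.addData("Rotation", "{X, Y, Z} = %.0f, %.0f, %.0f", rX, rY, rZ);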
                }
            }
            else {
                telemetry.addData("VuMark", "not visible");
            }

            telemetry.update();
        }
    }

    String format(OpenGLMatrix transformationMatrix) {
        return (transformationMatrix != null) ? transformationMatrix.formatAsTransform() : "null";
    }
}
@ -1,203 +0,0 @@
package org.firstinspires.ftc.robotcontroller.external.samples;

import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.util.Range;

import org.firstinspires.ftc.robotcore.external.ClassFactory;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix;
import org.firstinspires.ftc.robotcore.external.matrices.VectorF;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackable;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackableDefaultListener;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackables;

/**
 * This OpMode illustrates using a webcam to locate and drive towards ANY Vuforia target.
 * The code assumes a basic two-wheel Robot Configuration with motors named left_drive and right_drive.
 * The motor directions must be set so a positive drive goes forward and a positive turn rotates to the right.
 *
 * Under manual control, the left stick will move forward/back, and the right stick will turn left/right.
 * This is called POV Joystick mode, different from Tank Drive (where each joystick controls a wheel).
 * Manually drive the robot until it displays Target data on the Driver Station.
 * Press and hold the *Left Bumper* to enable the automatic "Drive to target" mode.
 * Release the Left Bumper to return to manual driving mode.
 *
 * Use DESIRED_DISTANCE to set how close you want the robot to get to the target.
 * Speed and Turn sensitivity can be adjusted using the SPEED_GAIN and TURN_GAIN constants.
 *
 * For more Vuforia details, or to adapt this OpMode for a phone camera, view the
 * ConceptVuforiaFieldNavigation and ConceptVuforiaFieldNavigationWebcam samples.
 *
 * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
 * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
 *
 * IMPORTANT: In order to use this OpMode, you need to obtain your own Vuforia license key as
 * is explained below.
 */

@TeleOp(name="Drive To Target", group = "Concept")
@Disabled
public class ConceptVuforiaDriveToTargetWebcam extends LinearOpMode
{
    // Adjust these numbers to suit your robot.
    final double DESIRED_DISTANCE = 8.0; // this is how close the camera should get to the target (inches)
    // The GAIN constants set the relationship between the measured position error
    // and how much power is applied to the drive motors. Drive = Error * Gain
    // Make these values smaller for smoother control.
    final double SPEED_GAIN = 0.02; // Speed Control "Gain". eg: Ramp up to 50% power at a 25 inch error.  (0.50 / 25.0)
    final double TURN_GAIN  = 0.01; // Turn Control "Gain".  eg: Ramp up to 25% power at a 25 degree error. (0.25 / 25.0)
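    // Worked example (illustrative): with SPEED_GAIN = 0.02, a robot 33 inches from the target
    // has a range error of 33 - 8 = 25 inches, so drive = 25 * 0.02 = 0.50 (50% power).
    // Likewise, TURN_GAIN = 0.01 gives turn = 0.25 for a 25 degree bearing error.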

    final double MM_PER_INCH = 25.40; // Metric conversion

    /*
     * IMPORTANT: You need to obtain your own license key to use Vuforia. The string below with which
     * 'parameters.vuforiaLicenseKey' is initialized is for illustration only, and will not function.
     * A Vuforia 'Development' license key can be obtained free of charge from the Vuforia developer
     * web site at https://developer.vuforia.com/license-manager.
     *
     * Vuforia license keys are always 380 characters long, and look as if they contain mostly
     * random data. As an example, here is a fragment of a valid key:
     *      ... yIgIzTqZ4mWjk9wd3cZO9T1axEqzuhxoGlfOOI2dRzKS4T0hQ8kT ...
     * Once you've obtained a license key, copy the string from the Vuforia web site
     * and paste it in to your code on the next line, between the double quotes.
     */
    private static final String VUFORIA_KEY =
            " --- YOUR NEW VUFORIA KEY GOES HERE --- ";

    VuforiaLocalizer vuforia    = null;
    OpenGLMatrix     targetPose = null;
    String           targetName = "";

    private DcMotor leftDrive  = null;
    private DcMotor rightDrive = null;

    @Override public void runOpMode()
    {
        /*
         * Configure Vuforia by creating a Parameter object, and passing it to the Vuforia engine.
         * To get an on-phone camera preview, use the code below.
         * If no camera preview is desired, use the parameter-less constructor instead (commented out below).
         */
        int cameraMonitorViewId = hardwareMap.appContext.getResources().getIdentifier("cameraMonitorViewId", "id", hardwareMap.appContext.getPackageName());
        VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(cameraMonitorViewId);
        // VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters();

        parameters.vuforiaLicenseKey = VUFORIA_KEY;

        // Turn off Extended tracking. Set this true if you want Vuforia to track beyond the target.
        parameters.useExtendedTracking = false;

        // Connect to the camera we are to use. This name must match what is set up in Robot Configuration
        parameters.cameraName = hardwareMap.get(WebcamName.class, "Webcam 1");
        this.vuforia = ClassFactory.getInstance().createVuforia(parameters);

        // Load the trackable objects from the Assets file, and give them meaningful names
        VuforiaTrackables targetsFreightFrenzy = this.vuforia.loadTrackablesFromAsset("FreightFrenzy");
        targetsFreightFrenzy.get(0).setName("Blue Storage");
        targetsFreightFrenzy.get(1).setName("Blue Alliance Wall");
        targetsFreightFrenzy.get(2).setName("Red Storage");
        targetsFreightFrenzy.get(3).setName("Red Alliance Wall");

        // Start tracking targets in the background
        targetsFreightFrenzy.activate();

        // Initialize the hardware variables. Note that the strings used here as parameters
        // to 'get' must correspond to the names assigned during the robot configuration
        // step (using the FTC Robot Controller app on the phone).
        leftDrive  = hardwareMap.get(DcMotor.class, "left_drive");
        rightDrive = hardwareMap.get(DcMotor.class, "right_drive");

        // To drive forward, most robots need the motor on one side to be reversed, because the axles point in opposite directions.
        // When run, this OpMode should start both motors driving forward. So adjust these two lines based on your first test drive.
        // Note: The settings here assume direct drive on left and right wheels. Gear Reduction or 90 Deg drives may require direction flips
        leftDrive.setDirection(DcMotor.Direction.REVERSE);
        rightDrive.setDirection(DcMotor.Direction.FORWARD);

        telemetry.addData(">", "Press Play to start");
        telemetry.update();

        waitForStart();

        boolean targetFound   = false; // Set to true when a target is detected by Vuforia
        double  targetRange   = 0;     // Distance from camera to target in Inches
        double  targetBearing = 0;     // Robot Heading, relative to target. Positive degrees means target is to the right.
        double  drive         = 0;     // Desired forward power (-1 to +1)
        double  turn          = 0;     // Desired turning power (-1 to +1)

        while (opModeIsActive())
        {
            // Look for the first visible target, and save its pose.
            targetFound = false;
            for (VuforiaTrackable trackable : targetsFreightFrenzy)
            {
                if (((VuforiaTrackableDefaultListener) trackable.getListener()).isVisible())
                {
                    targetPose = ((VuforiaTrackableDefaultListener)trackable.getListener()).getVuforiaCameraFromTarget();

                    // if we have a target, process the "pose" to determine the position of the target relative to the robot.
                    if (targetPose != null)
                    {
                        targetFound = true;
                        targetName  = trackable.getName();
                        VectorF trans = targetPose.getTranslation();

                        // Extract the X & Y components of the offset of the target relative to the robot
                        double targetX = trans.get(0) / MM_PER_INCH; // Image X axis
                        double targetY = trans.get(2) / MM_PER_INCH; // Image Z axis

                        // target range is based on distance from robot position to origin (right triangle).
                        targetRange = Math.hypot(targetX, targetY);

                        // target bearing is based on angle formed between the X axis and the target range line
                        targetBearing = Math.toDegrees(Math.asin(targetX / targetRange));
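                        // Geometry check (illustrative): if targetX = 10 in and targetY = 17.3 in, then
                        // targetRange = hypot(10, 17.3) is about 20 in, and targetBearing = asin(10 / 20) is about 30 degrees.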

                        break; // jump out of target tracking loop if we find a target.
                    }
                }
            }

            // Tell the driver what we see, and what to do.
            if (targetFound) {
                telemetry.addData(">","HOLD Left-Bumper to Drive to Target\n");
                telemetry.addData("Target", " %s", targetName);
                telemetry.addData("Range", "%5.1f inches", targetRange);
                telemetry.addData("Bearing","%3.0f degrees", targetBearing);
            } else {
                telemetry.addData(">","Drive using joystick to find target\n");
            }

            // Drive to target Automatically if Left Bumper is being pressed, AND we have found a target.
            if (gamepad1.left_bumper && targetFound) {

                // Determine heading and range error so we can use them to control the robot automatically.
                double rangeError   = (targetRange - DESIRED_DISTANCE);
                double headingError = targetBearing;

                // Use the speed and turn "gains" to calculate how we want the robot to move.
                drive = rangeError * SPEED_GAIN;
                turn  = headingError * TURN_GAIN;

                telemetry.addData("Auto","Drive %5.2f, Turn %5.2f", drive, turn);
            } else {

                // drive using manual POV Joystick mode.
                drive = -gamepad1.left_stick_y / 2.0; // Reduce drive rate to 50%.
                turn  =  gamepad1.right_stick_x / 4.0; // Reduce turn rate to 25%.
                telemetry.addData("Manual","Drive %5.2f, Turn %5.2f", drive, turn);
            }
            telemetry.update();

            // Calculate left and right wheel powers and send them to the motors.
            double leftPower  = Range.clip(drive + turn, -1.0, 1.0);
            double rightPower = Range.clip(drive - turn, -1.0, 1.0);
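            // Mixing example (illustrative): drive = 0.30 and turn = 0.10 give
            // leftPower = 0.40 and rightPower = 0.20, so the robot curves to the right.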
            leftDrive.setPower(leftPower);
            rightDrive.setPower(rightPower);

            sleep(10);
        }
    }
}
@ -1,284 +0,0 @@
/* Copyright (c) 2019 FIRST. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted (subject to the limitations in the disclaimer below) provided that
 * the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this list
 * of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice, this
 * list of conditions and the following disclaimer in the documentation and/or
 * other materials provided with the distribution.
 *
 * Neither the name of FIRST nor the names of its contributors may be used to endorse or
 * promote products derived from this software without specific prior written permission.
 *
 * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
 * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

package org.firstinspires.ftc.robotcontroller.external.samples;

import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;

import org.firstinspires.ftc.robotcore.external.ClassFactory;
import org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix;
import org.firstinspires.ftc.robotcore.external.matrices.VectorF;
import org.firstinspires.ftc.robotcore.external.navigation.Orientation;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackable;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackableDefaultListener;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackables;

import java.util.ArrayList;
import java.util.List;

import static org.firstinspires.ftc.robotcore.external.navigation.AngleUnit.DEGREES;
import static org.firstinspires.ftc.robotcore.external.navigation.AxesOrder.XYZ;
import static org.firstinspires.ftc.robotcore.external.navigation.AxesOrder.YZX;
import static org.firstinspires.ftc.robotcore.external.navigation.AxesReference.EXTRINSIC;
import static org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer.CameraDirection.BACK;

/**
 * This OpMode illustrates using the Vuforia localizer to determine the positioning and orientation
 * of the robot on the FTC field, using the RC phone's camera. The code is structured as a LinearOpMode.
 *
 * Note: If you are using a WEBCAM see ConceptVuforiaFieldNavigationWebcam.java
 *
 * When images are located, Vuforia is able to determine the position and orientation of the
 * image relative to the camera. This sample code then combines that information with a
 * knowledge of where the target images are on the field, to determine the location of the camera.
 *
 * Finally, the location of the camera on the robot is used to determine the
 * robot's location and orientation on the field.
 *
 * To learn more about the FTC field coordinate model, see FTC_FieldCoordinateSystemDefinition.pdf in this folder
 *
 * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
 * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
 *
 * IMPORTANT: In order to use this OpMode, you need to obtain your own Vuforia license key as
 * is explained below.
 */

@TeleOp(name="Vuforia Field Nav", group ="Concept")
@Disabled
public class ConceptVuforiaFieldNavigation extends LinearOpMode {

    // IMPORTANT: For Phone Camera, set 1) the camera source and 2) the orientation, based on how your phone is mounted:
    // 1) Camera Source. Valid choices are: BACK (behind screen) or FRONT (selfie side)
    // 2) Phone Orientation. Choices are: PHONE_IS_PORTRAIT = true (portrait) or PHONE_IS_PORTRAIT = false (landscape)

    private static final VuforiaLocalizer.CameraDirection CAMERA_CHOICE = BACK;
    private static final boolean PHONE_IS_PORTRAIT = false;

    /*
     * IMPORTANT: You need to obtain your own license key to use Vuforia. The string below with which
     * 'parameters.vuforiaLicenseKey' is initialized is for illustration only, and will not function.
     * A Vuforia 'Development' license key can be obtained free of charge from the Vuforia developer
     * web site at https://developer.vuforia.com/license-manager.
     *
     * Vuforia license keys are always 380 characters long, and look as if they contain mostly
     * random data. As an example, here is a fragment of a valid key:
     *      ... yIgIzTqZ4mWjk9wd3cZO9T1axEqzuhxoGlfOOI2dRzKS4T0hQ8kT ...
     * Once you've obtained a license key, copy the string from the Vuforia web site
     * and paste it in to your code on the next line, between the double quotes.
     */
    private static final String VUFORIA_KEY =
            " -- YOUR NEW VUFORIA KEY GOES HERE --- ";

    // Since ImageTarget trackables use mm to specify their dimensions, we must use mm for all the physical dimensions.
    // We will define some constants and conversions here. These are useful for the Freight Frenzy field.
    private static final float mmPerInch      = 25.4f;
    private static final float mmTargetHeight = 6 * mmPerInch; // the height of the center of the target image above the floor
    private static final float halfField      = 72 * mmPerInch;
    private static final float halfTile       = 12 * mmPerInch;
    private static final float oneAndHalfTile = 36 * mmPerInch;
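    // For reference (added): halfField = 72 * 25.4 = 1828.8 mm, halfTile = 304.8 mm, and
    // oneAndHalfTile = 914.4 mm, i.e. half of a 12 ft square field and fractions of 24-inch tiles.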

    // Class Members
    private OpenGLMatrix lastLocation = null;
    private VuforiaLocalizer vuforia = null;
    private VuforiaTrackables targets = null;

    private boolean targetVisible = false;
    private float phoneXRotate = 0;
    private float phoneYRotate = 0;
    private float phoneZRotate = 0;

    @Override public void runOpMode() {
        /*
         * Configure Vuforia by creating a Parameter object, and passing it to the Vuforia engine.
         * To get an on-phone camera preview, use the code below.
         * If no camera preview is desired, use the parameter-less constructor instead (commented out below).
         */
        int cameraMonitorViewId = hardwareMap.appContext.getResources().getIdentifier("cameraMonitorViewId", "id", hardwareMap.appContext.getPackageName());
        VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(cameraMonitorViewId);
        // VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters();

        parameters.vuforiaLicenseKey = VUFORIA_KEY;
        parameters.cameraDirection = CAMERA_CHOICE;

        // Turn off Extended tracking. Set this true if you want Vuforia to track beyond the target.
        parameters.useExtendedTracking = false;

        // Instantiate the Vuforia engine
        vuforia = ClassFactory.getInstance().createVuforia(parameters);

        // Load the data sets for the trackable objects. These particular data
        // sets are stored in the 'assets' part of our application.
        targets = this.vuforia.loadTrackablesFromAsset("FreightFrenzy");

        // For convenience, gather together all the trackable objects in one easily-iterable collection
        List<VuforiaTrackable> allTrackables = new ArrayList<VuforiaTrackable>();
        allTrackables.addAll(targets);

        /*
         * In order for localization to work, we need to tell the system where each target is on the field, and
         * where the phone resides on the robot. These specifications are in the form of transformation matrices.
         * Transformation matrices are a central, important concept in the math involved in localization.
         * See https://en.wikipedia.org/wiki/Transformation_matrix
         * for detailed information. Commonly, you'll encounter transformation matrices as instances
         * of the {@link OpenGLMatrix} class.
         *
         * If you are standing in the Red Alliance Station looking towards the center of the field,
         *     - The X axis runs from your left to the right. (positive from the center to the right)
         *     - The Y axis runs from the Red Alliance Station towards the other side of the field
         *       where the Blue Alliance Station is. (Positive is from the center, towards the Blue Alliance station)
         *     - The Z axis runs from the floor, upwards towards the ceiling. (Positive is above the floor)
         *
         * Before being transformed, each target image is conceptually located at the origin of the field's
         * coordinate system (the center of the field), facing up.
         */

        // Name and locate each trackable object
        identifyTarget(0, "Blue Storage",       -halfField,  oneAndHalfTile, mmTargetHeight, 90, 0,  90);
        identifyTarget(1, "Blue Alliance Wall",  halfTile,   halfField,      mmTargetHeight, 90, 0,   0);
        identifyTarget(2, "Red Storage",        -halfField, -oneAndHalfTile, mmTargetHeight, 90, 0,  90);
        identifyTarget(3, "Red Alliance Wall",   halfTile,  -halfField,      mmTargetHeight, 90, 0, 180);

        /*
         * Create a transformation matrix describing where the phone is on the robot.
         *
         * NOTE !!!! It's very important that you turn OFF your phone's Auto-Screen-Rotation option.
         * Lock it into Portrait for these numbers to work.
         *
         * Info: The coordinate frame for the robot looks the same as the field.
         * The robot's "forward" direction is facing out along X axis, with the LEFT side facing out along the Y axis.
         * Z is UP on the robot. This equates to a heading angle of Zero degrees.
         *
         * The phone starts out lying flat, with the screen facing Up and with the physical top of the phone
         * pointing to the LEFT side of the Robot.
         * The two examples below assume that the camera is facing forward out the front of the robot.
         */

        // We need to rotate the camera around its long axis to bring the correct camera forward.
        if (CAMERA_CHOICE == BACK) {
            phoneYRotate = -90;
        } else {
            phoneYRotate = 90;
        }

        // Rotate the phone vertical about the X axis if it's in portrait mode
        if (PHONE_IS_PORTRAIT) {
            phoneXRotate = 90;
        }

        // Next, translate the camera lens to where it is on the robot.
        // In this example, it is centered on the robot (left-to-right and front-to-back), and 6 inches above ground level.
        final float CAMERA_FORWARD_DISPLACEMENT  = 0.0f * mmPerInch; // eg: Enter the forward distance from the center of the robot to the camera lens
        final float CAMERA_VERTICAL_DISPLACEMENT = 6.0f * mmPerInch; // eg: Camera is 6 Inches above ground
        final float CAMERA_LEFT_DISPLACEMENT     = 0.0f * mmPerInch; // eg: Enter the left distance from the center of the robot to the camera lens

        OpenGLMatrix robotFromCamera = OpenGLMatrix
                .translation(CAMERA_FORWARD_DISPLACEMENT, CAMERA_LEFT_DISPLACEMENT, CAMERA_VERTICAL_DISPLACEMENT)
                .multiplied(Orientation.getRotationMatrix(EXTRINSIC, YZX, DEGREES, phoneYRotate, phoneZRotate, phoneXRotate));
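        // Added note (interpretation, for clarity): robotFromCamera composes the translation with the
        // rotation, mapping points in the phone's camera frame into the robot frame; with the EXTRINSIC
        // YZX ordering, phoneYRotate is applied first, then phoneZRotate, then phoneXRotate.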

        /* Let all the trackable listeners know where the phone is. */
        for (VuforiaTrackable trackable : allTrackables) {
            ((VuforiaTrackableDefaultListener) trackable.getListener()).setPhoneInformation(robotFromCamera, parameters.cameraDirection);
        }

        /*
         * WARNING:
         * In this sample, we do not wait for PLAY to be pressed. Target Tracking is started immediately when INIT is pressed.
         * This sequence is used to enable the new remote DS Camera Stream feature to be used with this sample.
         * CONSEQUENTLY do not put any driving commands in this loop.
         * To restore the normal opmode structure, just un-comment the following line:
         */

        // waitForStart();

        /* Note: To use the remote camera preview:
         * AFTER you hit Init on the Driver Station, use the "options menu" to select "Camera Stream"
         * Tap the preview window to receive a fresh image.
         * It is not permitted to transition to RUN while the camera preview window is active.
         * Either press STOP to exit the OpMode, or use the "options menu" again, and select "Camera Stream" to close the preview window.
         */

        targets.activate();
        while (!isStopRequested()) {

            // check all the trackable targets to see which one (if any) is visible.
            targetVisible = false;
            for (VuforiaTrackable trackable : allTrackables) {
                if (((VuforiaTrackableDefaultListener)trackable.getListener()).isVisible()) {
                    telemetry.addData("Visible Target", trackable.getName());
                    targetVisible = true;

                    // getUpdatedRobotLocation() will return null if no new information is available since
                    // the last time that call was made, or if the trackable is not currently visible.
                    OpenGLMatrix robotLocationTransform = ((VuforiaTrackableDefaultListener)trackable.getListener()).getUpdatedRobotLocation();
                    if (robotLocationTransform != null) {
                        lastLocation = robotLocationTransform;
                    }
                    break;
                }
            }

            // Provide feedback as to where the robot is located (if we know).
            if (targetVisible) {
                // express position (translation) of robot in inches.
                VectorF translation = lastLocation.getTranslation();
                telemetry.addData("Pos (inches)", "{X, Y, Z} = %.1f, %.1f, %.1f",
                        translation.get(0) / mmPerInch, translation.get(1) / mmPerInch, translation.get(2) / mmPerInch);

                // express the rotation of the robot in degrees.
                Orientation rotation = Orientation.getOrientation(lastLocation, EXTRINSIC, XYZ, DEGREES);
                telemetry.addData("Rot (deg)", "{Roll, Pitch, Heading} = %.0f, %.0f, %.0f", rotation.firstAngle, rotation.secondAngle, rotation.thirdAngle);
            }
            else {
                telemetry.addData("Visible Target", "none");
            }
            telemetry.update();
        }

        // Disable Tracking when we are done.
        targets.deactivate();
    }

    /***
     * Identify a target by naming it, and setting its position and orientation on the field
     * @param targetIndex index of the target within the loaded trackables set
     * @param targetName name to assign to the target
     * @param dx, dy, dz Target offsets in x,y,z axes
     * @param rx, ry, rz Target rotations in x,y,z axes
     */
    void identifyTarget(int targetIndex, String targetName, float dx, float dy, float dz, float rx, float ry, float rz) {
        VuforiaTrackable aTarget = targets.get(targetIndex);
        aTarget.setName(targetName);
        aTarget.setLocation(OpenGLMatrix.translation(dx, dy, dz)
                .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, rx, ry, rz)));
    }
}
@ -1,274 +0,0 @@
/* Copyright (c) 2019 FIRST. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted (subject to the limitations in the disclaimer below) provided that
 * the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this list
 * of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice, this
 * list of conditions and the following disclaimer in the documentation and/or
 * other materials provided with the distribution.
 *
 * Neither the name of FIRST nor the names of its contributors may be used to endorse or
 * promote products derived from this software without specific prior written permission.
 *
 * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
 * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

package org.firstinspires.ftc.robotcontroller.external.samples;

import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;

import org.firstinspires.ftc.robotcore.external.ClassFactory;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix;
import org.firstinspires.ftc.robotcore.external.matrices.VectorF;
import org.firstinspires.ftc.robotcore.external.navigation.Orientation;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackable;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackableDefaultListener;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackables;

import java.util.ArrayList;
import java.util.List;

import static org.firstinspires.ftc.robotcore.external.navigation.AngleUnit.DEGREES;
import static org.firstinspires.ftc.robotcore.external.navigation.AxesOrder.XYZ;
import static org.firstinspires.ftc.robotcore.external.navigation.AxesOrder.XZY;
import static org.firstinspires.ftc.robotcore.external.navigation.AxesReference.EXTRINSIC;

/**
 * This OpMode illustrates using the Vuforia localizer to determine the positioning and orientation
 * of the robot on the FTC field, using a WEBCAM. The code is structured as a LinearOpMode.
 *
 * NOTE: If you are running on a Phone with a built-in camera, use the ConceptVuforiaFieldNavigation example instead of this one.
 * NOTE: It is possible to switch between multiple WebCams (eg: one for the left side and one for the right).
 * For a related example of how to do this, see ConceptTensorFlowObjectDetectionSwitchableCameras
 *
 * When images are located, Vuforia is able to determine the position and orientation of the
 * image relative to the camera. This sample code then combines that information with a
 * knowledge of where the target images are on the field, to determine the location of the camera.
 *
 * Finally, the location of the camera on the robot is used to determine the
 * robot's location and orientation on the field.
 *
 * To learn more about the FTC field coordinate model, see FTC_FieldCoordinateSystemDefinition.pdf in this folder
 *
 * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
 * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
 *
 * IMPORTANT: In order to use this OpMode, you need to obtain your own Vuforia license key as
 * is explained below.
 */

@TeleOp(name="Vuforia Field Nav Webcam", group ="Concept")
@Disabled
public class ConceptVuforiaFieldNavigationWebcam extends LinearOpMode {

    /*
     * IMPORTANT: You need to obtain your own license key to use Vuforia. The string below with which
     * 'parameters.vuforiaLicenseKey' is initialized is for illustration only, and will not function.
     * A Vuforia 'Development' license key can be obtained free of charge from the Vuforia developer
     * web site at https://developer.vuforia.com/license-manager.
     *
     * Vuforia license keys are always 380 characters long, and look as if they contain mostly
     * random data. As an example, here is a fragment of a valid key:
     *      ... yIgIzTqZ4mWjk9wd3cZO9T1axEqzuhxoGlfOOI2dRzKS4T0hQ8kT ...
     * Once you've obtained a license key, copy the string from the Vuforia web site
     * and paste it in to your code on the next line, between the double quotes.
     */
    private static final String VUFORIA_KEY =
            " --- YOUR NEW VUFORIA KEY GOES HERE --- ";

    // Since ImageTarget trackables use mm to specify their dimensions, we must use mm for all the physical dimensions.
    // We will define some constants and conversions here
    private static final float mmPerInch      = 25.4f;
    private static final float mmTargetHeight = 6 * mmPerInch; // the height of the center of the target image above the floor
    private static final float halfField      = 72 * mmPerInch;
    private static final float halfTile       = 12 * mmPerInch;
    private static final float oneAndHalfTile = 36 * mmPerInch;

    // Class Members
    private OpenGLMatrix lastLocation = null;
    private VuforiaLocalizer vuforia = null;
    private VuforiaTrackables targets = null;
    private WebcamName webcamName = null;

    private boolean targetVisible = false;

    @Override public void runOpMode() {
        // Connect to the camera we are to use. This name must match what is set up in Robot Configuration
        webcamName = hardwareMap.get(WebcamName.class, "Webcam 1");

        /*
         * Configure Vuforia by creating a Parameter object, and passing it to the Vuforia engine.
         * We can pass Vuforia the handle to a camera preview resource (on the RC screen);
         * If no camera-preview is desired, use the parameter-less constructor instead (commented out below).
         * Note: A preview window is required if you want to view the camera stream on the Driver Station Phone.
         */
        int cameraMonitorViewId = hardwareMap.appContext.getResources().getIdentifier("cameraMonitorViewId", "id", hardwareMap.appContext.getPackageName());
        VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(cameraMonitorViewId);
        // VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters();

        parameters.vuforiaLicenseKey = VUFORIA_KEY;

        // We also indicate which camera we wish to use.
        parameters.cameraName = webcamName;

        // Turn off Extended tracking. Set this true if you want Vuforia to track beyond the target.
        parameters.useExtendedTracking = false;

        // Instantiate the Vuforia engine
        vuforia = ClassFactory.getInstance().createVuforia(parameters);

        // Load the data sets for the trackable objects. These particular data
        // sets are stored in the 'assets' part of our application.
        targets = this.vuforia.loadTrackablesFromAsset("FreightFrenzy");

        // For convenience, gather together all the trackable objects in one easily-iterable collection
        List<VuforiaTrackable> allTrackables = new ArrayList<VuforiaTrackable>();
        allTrackables.addAll(targets);

        /*
         * In order for localization to work, we need to tell the system where each target is on the field, and
         * where the phone resides on the robot. These specifications are in the form of transformation matrices.
         * Transformation matrices are a central, important concept in the math involved in localization.
         * See https://en.wikipedia.org/wiki/Transformation_matrix
         * for detailed information. Commonly, you'll encounter transformation matrices as instances
         * of the {@link OpenGLMatrix} class.
         *
         * If you are standing in the Red Alliance Station looking towards the center of the field,
         *     - The X axis runs from your left to the right. (positive from the center to the right)
         *     - The Y axis runs from the Red Alliance Station towards the other side of the field
         *       where the Blue Alliance Station is. (Positive is from the center, towards the Blue Alliance station)
         *     - The Z axis runs from the floor, upwards towards the ceiling. (Positive is above the floor)
         *
         * Before being transformed, each target image is conceptually located at the origin of the field's
         * coordinate system (the center of the field), facing up.
         */

        // Name and locate each trackable object
        identifyTarget(0, "Blue Storage",       -halfField,  oneAndHalfTile, mmTargetHeight, 90, 0,  90);
        identifyTarget(1, "Blue Alliance Wall",  halfTile,   halfField,      mmTargetHeight, 90, 0,   0);
        identifyTarget(2, "Red Storage",        -halfField, -oneAndHalfTile, mmTargetHeight, 90, 0,  90);
        identifyTarget(3, "Red Alliance Wall",   halfTile,  -halfField,      mmTargetHeight, 90, 0, 180);

        /*
         * Create a transformation matrix describing where the camera is on the robot.
         *
         * Info: The coordinate frame for the robot looks the same as the field.
         * The robot's "forward" direction is facing out along X axis, with the LEFT side facing out along the Y axis.
         * Z is UP on the robot. This equates to a bearing angle of Zero degrees.
         *
         * For a WebCam, the default starting orientation of the camera is looking UP (pointing in the Z direction),
         * with the wide (horizontal) axis of the camera aligned with the X axis, and
         * the narrow (vertical) axis of the camera aligned with the Y axis.
         *
         * But, this example assumes that the camera is actually facing forward out the front of the robot.
         * So, the "default" camera position requires two rotations to get it oriented correctly.
         * 1) First it must be rotated +90 degrees around the X axis to get it horizontal (it's now facing out the right side of the robot)
         * 2) Next it must be rotated +90 degrees (counter-clockwise) around the Z axis to face forward.
         *
         * Finally the camera can be translated to its actual mounting position on the robot.
         * In this example, it is centered on the robot (left-to-right and front-to-back), and 6 inches above ground level.
         */

        final float CAMERA_FORWARD_DISPLACEMENT  = 0.0f * mmPerInch; // eg: Enter the forward distance from the center of the robot to the camera lens
        final float CAMERA_VERTICAL_DISPLACEMENT = 6.0f * mmPerInch; // eg: Camera is 6 Inches above ground
        final float CAMERA_LEFT_DISPLACEMENT     = 0.0f * mmPerInch; // eg: Enter the left distance from the center of the robot to the camera lens

        OpenGLMatrix cameraLocationOnRobot = OpenGLMatrix
                .translation(CAMERA_FORWARD_DISPLACEMENT, CAMERA_LEFT_DISPLACEMENT, CAMERA_VERTICAL_DISPLACEMENT)
                .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XZY, DEGREES, 90, 90, 0));
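        // Sanity check (added): in the XZY call above, the first +90 degrees is about the X axis
        // (tilting the lens from UP to horizontal) and the second +90 degrees is about the Z axis
        // (swinging it from the right side to face forward), matching steps 1) and 2) described above.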
|
||||
/** Let all the trackable listeners know where the camera is. */
|
||||
for (VuforiaTrackable trackable : allTrackables) {
|
||||
((VuforiaTrackableDefaultListener) trackable.getListener()).setCameraLocationOnRobot(parameters.cameraName, cameraLocationOnRobot);
|
||||
}
|
||||
|
||||
/*
|
||||
* WARNING:
|
||||
* In this sample, we do not wait for PLAY to be pressed. Target Tracking is started immediately when INIT is pressed.
|
||||
* This sequence is used to enable the new remote DS Camera Preview feature to be used with this sample.
|
||||
* CONSEQUENTLY do not put any driving commands in this loop.
|
||||
* To restore the normal opmode structure, just un-comment the following line:
|
||||
*/
|
||||
|
||||
// waitForStart();
|
||||
|
||||
/* Note: To use the remote camera preview:
|
||||
* AFTER you hit Init on the Driver Station, use the "options menu" to select "Camera Stream"
|
||||
* Tap the preview window to receive a fresh image.
|
||||
* It is not permitted to transition to RUN while the camera preview window is active.
|
||||
* Either press STOP to exit the OpMode, or use the "options menu" again, and select "Camera Stream" to close the preview window.
|
||||
*/
|
||||
|
||||
targets.activate();
|
||||
while (!isStopRequested()) {
|
||||
|
||||
// check all the trackable targets to see which one (if any) is visible.
|
||||
targetVisible = false;
|
||||
for (VuforiaTrackable trackable : allTrackables) {
|
||||
if (((VuforiaTrackableDefaultListener)trackable.getListener()).isVisible()) {
|
||||
telemetry.addData("Visible Target", trackable.getName());
|
||||
targetVisible = true;
|
||||
|
||||
// getUpdatedRobotLocation() will return null if no new information is available since
|
||||
// the last time that call was made, or if the trackable is not currently visible.
|
||||
OpenGLMatrix robotLocationTransform = ((VuforiaTrackableDefaultListener)trackable.getListener()).getUpdatedRobotLocation();
|
||||
if (robotLocationTransform != null) {
|
||||
lastLocation = robotLocationTransform;
|
||||
}
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// Provide feedback as to where the robot is located (if we know).
|
||||
if (targetVisible) {
|
||||
// express position (translation) of robot in inches.
|
||||
VectorF translation = lastLocation.getTranslation();
|
||||
telemetry.addData("Pos (inches)", "{X, Y, Z} = %.1f, %.1f, %.1f",
|
||||
translation.get(0) / mmPerInch, translation.get(1) / mmPerInch, translation.get(2) / mmPerInch);
|
||||
|
||||
// express the rotation of the robot in degrees.
|
||||
Orientation rotation = Orientation.getOrientation(lastLocation, EXTRINSIC, XYZ, DEGREES);
|
||||
telemetry.addData("Rot (deg)", "{Roll, Pitch, Heading} = %.0f, %.0f, %.0f", rotation.firstAngle, rotation.secondAngle, rotation.thirdAngle);
|
||||
}
|
||||
else {
|
||||
telemetry.addData("Visible Target", "none");
|
||||
}
|
||||
telemetry.update();
|
||||
}
|
||||
|
||||
// Disable Tracking when we are done;
|
||||
targets.deactivate();
|
||||
}
|
||||
|
||||
/***
|
||||
* Identify a target by naming it, and setting its position and orientation on the field
|
||||
* @param targetIndex
|
||||
* @param targetName
|
||||
* @param dx, dy, dz Target offsets in x,y,z axes
|
||||
* @param rx, ry, rz Target rotations in x,y,z axes
|
||||
*/
|
||||
void identifyTarget(int targetIndex, String targetName, float dx, float dy, float dz, float rx, float ry, float rz) {
|
||||
VuforiaTrackable aTarget = targets.get(targetIndex);
|
||||
aTarget.setName(targetName);
|
||||
aTarget.setLocation(OpenGLMatrix.translation(dx, dy, dz)
|
||||
.multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, rx, ry, rz)));
|
||||
}
|
||||
}
|
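    // A minimal usage sketch for identifyTarget() above. The target name and field coordinates
    // here are purely illustrative (not official season values), and they assume the halfField
    // and mmTargetHeight constants defined earlier in this sample:
    //
    //     identifyTarget(0, "Example Wall Target", -halfField, 0, mmTargetHeight, 90, 0, 90);
    //
    // This places target 0 on the X = -halfField wall at the standard target height, rotated
    // 90 degrees about X to stand it upright, then 90 degrees about Z to face the field center.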
Binary file not shown.
@@ -1,4 +1,4 @@
/* Copyright (c) 2017 FIRST. All rights reserved.
/* Copyright (c) 2022 FIRST. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted (subject to the limitations in the disclaimer below) provided that
@@ -29,7 +29,7 @@

package org.firstinspires.ftc.robotcontroller.external.samples;

import com.qualcomm.hardware.modernrobotics.ModernRoboticsI2cGyro;
import com.qualcomm.hardware.bosch.BNO055IMU;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
@@ -37,36 +37,56 @@ import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.util.ElapsedTime;
import com.qualcomm.robotcore.util.Range;

import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.robotcore.external.navigation.AxesOrder;
import org.firstinspires.ftc.robotcore.external.navigation.AxesReference;
import org.firstinspires.ftc.robotcore.external.navigation.Orientation;
/**
 * This file illustrates the concept of driving a path based on Gyro heading and encoder counts.
 * The code is structured as a LinearOpMode
 * This file illustrates the concept of driving an autonomous path based on Gyro heading and encoder counts.
 * The code is structured as a LinearOpMode
 *
 * The code REQUIRES that you DO have encoders on the wheels,
 *   otherwise you would use: RobotAutoDriveByTime;
 * The path to be followed by the robot is built from a series of drive, turn or pause steps.
 * Each step on the path is defined by a single function call, and these can be strung together in any order.
 *
 * This code ALSO requires that you have a Modern Robotics I2C gyro with the name "gyro"
 *   otherwise you would use: RobotAutoDriveByEncoder;
 * The code REQUIRES that you have encoders on the drive motors, otherwise you should use: RobotAutoDriveByTime;
 *
 * This code requires that the drive Motors have been configured such that a positive
 * power command moves them forward, and causes the encoders to count UP.
 * This code ALSO requires that you have a BOSCH BNO055 IMU, otherwise you would use: RobotAutoDriveByEncoder;
 * This IMU is found in REV Control/Expansion Hubs shipped prior to July 2022, and possibly also on later models.
 * To run as written, the Control/Expansion hub should be mounted horizontally on a flat part of the robot chassis.
 *
 * This code uses the RUN_TO_POSITION mode to enable the Motor controllers to generate the run profile
 * This sample requires that the drive Motors have been configured with names: left_drive and right_drive.
 * It also requires that a positive power command moves both motors forward, and causes the encoders to count UP.
 * So please verify that both of your motors move the robot forward on the first move. If not, make the required correction.
 * See the beginning of runOpMode() to set the FORWARD/REVERSE option for each motor.
 *
 * In order to calibrate the Gyro correctly, the robot must remain stationary during calibration.
 * This is performed when the INIT button is pressed on the Driver Station.
 * This code assumes that the robot is stationary when the INIT button is pressed.
 * If this is not the case, then the INIT should be performed again.
 * This code uses RUN_TO_POSITION mode for driving straight, and RUN_USING_ENCODER mode for turning and holding.
 * Note: You must call setTargetPosition() at least once before switching to RUN_TO_POSITION mode.
 *
 * Note: in this example, all angles are referenced to the initial coordinate frame set during
 * the Gyro Calibration process, or whenever the program issues a resetZAxisIntegrator() call on the Gyro.
 * Notes:
 *
 * All angles are referenced to the coordinate-frame that is set whenever resetHeading() is called.
 * In this sample, the heading is reset when the Start button is touched on the Driver Station.
 * Note: It would be possible to reset the heading after each move, but this would accumulate steering errors.
 *
 * The angle of movement/rotation is assumed to be a standardized rotation around the robot Z axis,
 * which means that a Positive rotation is Counter Clockwise, looking down on the field.
 * This is consistent with the FTC field coordinate conventions set out in the document:
 * ftc_app\doc\tutorial\FTC_FieldCoordinateSystemDefinition.pdf
 * https://ftc-docs.firstinspires.org/field-coordinate-system
 *
 * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
 * Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list
 * Control Approach.
 *
 * To reach, or maintain a required heading, this code implements a basic Proportional Controller where:
 *
 * Steering power = Heading Error * Proportional Gain.
 *
 * "Heading Error" is calculated by taking the difference between the desired heading and the actual heading,
 * and then "normalizing" it by converting it to a value in the +/- 180 degree range.
 *
 * "Proportional Gain" is a constant that YOU choose to set the "strength" of the steering response.
 *
 * Use Android Studio to Copy this Class, and Paste it into your "TeamCode" folder with a new name.
 * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list
 */

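// As a quick numeric illustration of the control law described above (values illustrative only):
// with the P_TURN_GAIN of 0.02 defined below, a 45 degree heading error requests
// 45 * 0.02 = 0.90 of full turning power, while a 5 degree error requests only 0.10,
// so the correction fades smoothly as the robot settles onto the target heading.
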
@Autonomous(name="Robot: Auto Drive By Gyro", group="Robot")
@@ -76,9 +96,21 @@ public class RobotAutoDriveByGyro_Linear extends LinearOpMode {
    /* Declare OpMode members. */
    private DcMotor         leftDrive   = null;
    private DcMotor         rightDrive  = null;
    ModernRoboticsI2cGyro   gyro        = null;      // Additional Gyro device
    private BNO055IMU       imu         = null;      // Control/Expansion Hub IMU

    private double          robotHeading  = 0;
    private double          headingOffset = 0;
    private double          headingError  = 0;

    // These variables are declared here (as class members) so they can be updated in various methods,
    // but still be displayed by sendTelemetry()
    private double  targetHeading = 0;
    private double  driveSpeed    = 0;
    private double  turnSpeed     = 0;
    private double  leftSpeed     = 0;
    private double  rightSpeed    = 0;
    private int     leftTarget    = 0;
    private int     rightTarget   = 0;

    // Calculate the COUNTS_PER_INCH for your specific drive train.
    // Go to your motor vendor website to determine your motor's COUNTS_PER_MOTOR_REV
@@ -86,20 +118,24 @@ public class RobotAutoDriveByGyro_Linear extends LinearOpMode {
    // For example, use a value of 2.0 for a 12-tooth spur gear driving a 24-tooth spur gear.
    // This is gearing DOWN for less speed and more torque.
    // For gearing UP, use a gear ratio less than 1.0. Note this will affect the direction of wheel rotation.
    static final double     COUNTS_PER_MOTOR_REV    = 1440 ;    // eg: TETRIX Motor Encoder
    static final double     COUNTS_PER_MOTOR_REV    = 537.7 ;   // eg: GoBILDA 312 RPM Yellow Jacket
    static final double     DRIVE_GEAR_REDUCTION    = 1.0 ;     // No External Gearing.
    static final double     WHEEL_DIAMETER_INCHES   = 4.0 ;     // For figuring circumference
    static final double     COUNTS_PER_INCH         = (COUNTS_PER_MOTOR_REV * DRIVE_GEAR_REDUCTION) /
                                                      (WHEEL_DIAMETER_INCHES * 3.1415);
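    // Worked example with the default constants above (approximate):
    //   COUNTS_PER_INCH = (537.7 * 1.0) / (4.0 * 3.1415) = 42.8 counts per inch,
    //   so a 24 inch leg of the path corresponds to roughly 24 * 42.8 = 1027 encoder counts per wheel.
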
    // These constants define the desired driving/control characteristics
    // The can/should be tweaked to suite the specific robot drive train.
    static final double     DRIVE_SPEED             = 0.7;     // Nominal speed for better accuracy.
    static final double     TURN_SPEED              = 0.5;     // Nominal half speed for better accuracy.

    static final double     HEADING_THRESHOLD       = 1 ;      // As tight as we can make it with an integer gyro
    static final double     P_TURN_COEFF            = 0.1;     // Larger is more responsive, but also less stable
    static final double     P_DRIVE_COEFF           = 0.15;    // Larger is more responsive, but also less stable
    // They can/should be tweaked to suit the specific robot drive train.
    static final double     DRIVE_SPEED             = 0.4;     // Max driving speed for better distance accuracy.
    static final double     TURN_SPEED              = 0.2;     // Max Turn speed to limit turn rate
    static final double     HEADING_THRESHOLD       = 1.0 ;    // How close must the heading get to the target before moving to next step.
                                                               // Requiring more accuracy (a smaller number) will often make the turn take longer to get into the final position.
    // Define the Proportional control coefficient (or GAIN) for "heading control".
    // We define one value when Turning (larger errors), and the other is used when Driving straight (smaller errors).
    // Increase these numbers if the heading does not correct strongly enough (eg: a heavy robot or using tracks)
    // Decrease these numbers if the heading does not settle on the correct value (eg: very agile robot with omni wheels)
    static final double     P_TURN_GAIN             = 0.02;    // Larger is more responsive, but also less stable
    static final double     P_DRIVE_GAIN            = 0.03;    // Larger is more responsive, but also less stable


    @Override
@@ -115,145 +151,118 @@ public class RobotAutoDriveByGyro_Linear extends LinearOpMode {
        leftDrive.setDirection(DcMotor.Direction.REVERSE);
        rightDrive.setDirection(DcMotor.Direction.FORWARD);

        gyro = (ModernRoboticsI2cGyro)hardwareMap.gyroSensor.get("gyro");
        // define initialization values for IMU, and then initialize it.
        BNO055IMU.Parameters parameters = new BNO055IMU.Parameters();
        parameters.angleUnit            = BNO055IMU.AngleUnit.DEGREES;
        imu = hardwareMap.get(BNO055IMU.class, "imu");
        imu.initialize(parameters);

        // Ensure the robot it stationary, then reset the encoders and calibrate the gyro.
        // Ensure the robot is stationary. Reset the encoders and set the motors to BRAKE mode
        leftDrive.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
        rightDrive.setMode(DcMotor.RunMode.STOP_AND_RESET_ENCODER);
        leftDrive.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);
        rightDrive.setZeroPowerBehavior(DcMotor.ZeroPowerBehavior.BRAKE);

        // Send telemetry message to alert driver that we are calibrating;
        telemetry.addData(">", "Calibrating Gyro");
        telemetry.update();

        gyro.calibrate();

        // make sure the gyro is calibrated before continuing
        while (!isStopRequested() && gyro.isCalibrating())  {
            sleep(50);
            idle();
        }

        telemetry.addData(">", "Robot Ready.");
        telemetry.update();

        leftDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
        rightDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);

        // Wait for the game to start (Display Gyro value)
        // Wait for the game to start (Display Gyro value while waiting)
        while (opModeInInit()) {
            telemetry.addData(">", "Robot Heading = %4.0f", getHeading());
            telemetry.addData(">", "Robot Heading = %4.0f", getRawHeading());
            telemetry.update();
        }

        // Reset gyro before we move..
        gyro.resetZAxisIntegrator();
        getHeading();
        // Set the encoders for closed loop speed control, and reset the heading.
        leftDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
        rightDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
        resetHeading();

        // Step through each leg of the path,
        // Note: Reverse movement is obtained by setting a negative distance (not speed)
        // Put a hold after each turn
        gyroDrive(DRIVE_SPEED, 48.0, 0.0);    // Drive FWD 48 inches
        gyroTurn( TURN_SPEED, -45.0);         // Turn  CW to -45 Degrees
        gyroHold( TURN_SPEED, -45.0, 0.5);    // Hold -45 Deg heading for a 1/2 second
        gyroDrive(DRIVE_SPEED, 12.0, -45.0);  // Drive FWD 12 inches at 45 degrees
        gyroTurn( TURN_SPEED,  45.0);         // Turn  CCW to 45 Degrees
        gyroHold( TURN_SPEED,  45.0, 0.5);    // Hold  45 Deg heading for a 1/2 second
        gyroTurn( TURN_SPEED,   0.0);         // Turn  CW to 0 Degrees
        gyroHold( TURN_SPEED,   0.0, 1.0);    // Hold  0 Deg heading for a 1 second
        gyroDrive(DRIVE_SPEED,-48.0, 0.0);    // Drive REV 48 inches
        // Notes:   Reverse movement is obtained by setting a negative distance (not speed)
        //          holdHeading() is used after turns to let the heading stabilize
        //          Add a sleep(2000) after any step to keep the telemetry data visible for review

        driveStraight(DRIVE_SPEED, 24.0, 0.0);    // Drive Forward 24"
        turnToHeading( TURN_SPEED, -45.0);        // Turn  CW to -45 Degrees
        holdHeading( TURN_SPEED, -45.0, 0.5);     // Hold -45 Deg heading for a 1/2 second

        driveStraight(DRIVE_SPEED, 17.0, -45.0);  // Drive Forward 17" at -45 degrees (12"x and 12"y)
        turnToHeading( TURN_SPEED,  45.0);        // Turn  CCW to 45 Degrees
        holdHeading( TURN_SPEED,  45.0, 0.5);     // Hold  45 Deg heading for a 1/2 second

        driveStraight(DRIVE_SPEED, 17.0, 45.0);   // Drive Forward 17" at 45 degrees (-12"x and 12"y)
        turnToHeading( TURN_SPEED,   0.0);        // Turn  CW to 0 Degrees
        holdHeading( TURN_SPEED,   0.0, 1.0);     // Hold  0 Deg heading for 1 second

        driveStraight(DRIVE_SPEED,-48.0, 0.0);    // Drive in Reverse 48" (should return to approx. starting position)

        telemetry.addData("Path", "Complete");
        telemetry.addData("Final Heading", "%5.0f", getHeading());
        telemetry.update();
        sleep(1000);  // Pause to display last telemetry message.
    }

    /**
     *  Method to drive on a fixed compass bearing (angle), based on encoder counts.
    /*
     * ====================================================================================================
     * Driving "Helper" functions are below this line.
     * These provide the high and low level methods that handle driving straight and turning.
     * ====================================================================================================
     */

    // **********  HIGH Level driving functions.  ********************

    /**
     *  Method to drive in a straight line, on a fixed compass heading (angle), based on encoder counts.
     *  Move will stop if either of these conditions occurs:
     *  1) Move gets to the desired position
     *  2) Driver stops the opmode running.
     *
     * @param speed          Target speed for forward motion.  Should allow for +/- variance for adjusting heading
     * @param maxDriveSpeed  MAX Speed for forward/rev motion (range 0 to +1.0).
     * @param distance       Distance (in inches) to move from current position.  Negative distance means move backward.
     * @param angle          Absolute Angle (in Degrees) relative to last gyro reset.
     * @param heading        Absolute Heading Angle (in Degrees) relative to last gyro reset.
     *                       0 = fwd. +ve is CCW from fwd. -ve is CW from forward.
     *                       If a relative angle is required, add/subtract from current heading.
     *                       If a relative angle is required, add/subtract from the current robotHeading.
     */
    public void gyroDrive ( double speed,
                            double distance,
                            double angle) {

        int     newLeftTarget;
        int     newRightTarget;
        int     moveCounts;
        double  max;
        double  error;
        double  steer;
        double  leftSpeed;
        double  rightSpeed;
    public void driveStraight(double maxDriveSpeed,
                              double distance,
                              double heading) {

        // Ensure that the opmode is still active
        if (opModeIsActive()) {

            // Determine new target position, and pass to motor controller
            moveCounts = (int)(distance * COUNTS_PER_INCH);
            newLeftTarget = leftDrive.getCurrentPosition() + moveCounts;
            newRightTarget = rightDrive.getCurrentPosition() + moveCounts;
            int moveCounts = (int)(distance * COUNTS_PER_INCH);
            leftTarget = leftDrive.getCurrentPosition() + moveCounts;
            rightTarget = rightDrive.getCurrentPosition() + moveCounts;

            // Set Target and Turn On RUN_TO_POSITION
            leftDrive.setTargetPosition(newLeftTarget);
            rightDrive.setTargetPosition(newRightTarget);
            // Set Target FIRST, then turn on RUN_TO_POSITION
            leftDrive.setTargetPosition(leftTarget);
            rightDrive.setTargetPosition(rightTarget);

            leftDrive.setMode(DcMotor.RunMode.RUN_TO_POSITION);
            rightDrive.setMode(DcMotor.RunMode.RUN_TO_POSITION);

            // start motion.
            speed = Range.clip(Math.abs(speed), 0.0, 1.0);
            leftDrive.setPower(speed);
            rightDrive.setPower(speed);
            // Set the required driving speed (must be positive for RUN_TO_POSITION)
            // Start driving straight, and then enter the control loop
            maxDriveSpeed = Math.abs(maxDriveSpeed);
            moveRobot(maxDriveSpeed, 0);

            // keep looping while we are still active, and BOTH motors are running.
            while (opModeIsActive() &&
                   (leftDrive.isBusy() && rightDrive.isBusy())) {

                // adjust relative speed based on heading error.
                error = getError(angle);
                steer = getSteer(error, P_DRIVE_COEFF);
                // Determine required steering to keep on heading
                turnSpeed = getSteeringCorrection(heading, P_DRIVE_GAIN);

                // if driving in reverse, the motor correction also needs to be reversed
                if (distance < 0)
                    steer *= -1.0;
                    turnSpeed *= -1.0;

                leftSpeed = speed - steer;
                rightSpeed = speed + steer;

                // Normalize speeds if either one exceeds +/- 1.0;
                max = Math.max(Math.abs(leftSpeed), Math.abs(rightSpeed));
                if (max > 1.0)
                {
                    leftSpeed /= max;
                    rightSpeed /= max;
                }

                leftDrive.setPower(leftSpeed);
                rightDrive.setPower(rightSpeed);
                // Apply the turning correction to the current driving speed.
                moveRobot(driveSpeed, turnSpeed);

                // Display drive status for the driver.
                telemetry.addData("Angle Target:Current", "%5.2f:%5.0f", angle, robotHeading);
                telemetry.addData("Error:Steer", "%5.1f:%5.1f", error, steer);
                telemetry.addData("Target L:R", "%7d:%7d", newLeftTarget, newRightTarget);
                telemetry.addData("Actual L:R", "%7d:%7d", leftDrive.getCurrentPosition(),
                        rightDrive.getCurrentPosition());
                telemetry.addData("Speed L:R", "%5.2f:%5.2f", leftSpeed, rightSpeed);
                telemetry.update();
                sendTelemetry(true);
            }

            // Stop all motion;
            leftDrive.setPower(0);
            rightDrive.setPower(0);

            // Turn off RUN_TO_POSITION
            // Stop all motion & Turn off RUN_TO_POSITION
            moveRobot(0, 0);
            leftDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
            rightDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
        }
@@ -265,124 +274,158 @@ public class RobotAutoDriveByGyro_Linear extends LinearOpMode {
     *  1) Move gets to the heading (angle)
     *  2) Driver stops the opmode running.
     *
     * @param speed         Desired speed of turn.
     * @param angle         Absolute Angle (in Degrees) relative to last gyro reset.
     *                      0 = fwd. +ve is CCW from fwd. -ve is CW from forward.
     *                      If a relative angle is required, add/subtract from current heading.
     * @param maxTurnSpeed  Desired MAX speed of turn. (range 0 to +1.0)
     * @param heading       Absolute Heading Angle (in Degrees) relative to last gyro reset.
     *                      0 = fwd. +ve is CCW from fwd. -ve is CW from forward.
     *                      If a relative angle is required, add/subtract from current heading.
     */
    public void gyroTurn (  double speed, double angle) {
    public void turnToHeading(double maxTurnSpeed, double heading) {

        // Run getSteeringCorrection() once to pre-calculate the current error
        getSteeringCorrection(heading, P_DRIVE_GAIN);

        // keep looping while we are still active, and not on heading.
        while (opModeIsActive() && !onHeading(speed, angle, P_TURN_COEFF)) {
            // Update telemetry & Allow time for other processes to run.
            telemetry.update();
        while (opModeIsActive() && (Math.abs(headingError) > HEADING_THRESHOLD)) {

            // Determine required steering to keep on heading
            turnSpeed = getSteeringCorrection(heading, P_TURN_GAIN);

            // Clip the speed to the maximum permitted value.
            turnSpeed = Range.clip(turnSpeed, -maxTurnSpeed, maxTurnSpeed);

            // Pivot in place by applying the turning correction
            moveRobot(0, turnSpeed);

            // Display drive status for the driver.
            sendTelemetry(false);
        }

        // Stop all motion;
        moveRobot(0, 0);
    }

    /**
     *  Method to obtain & hold a heading for a finite amount of time
     *  Move will stop once the requested time has elapsed
     *  This function is useful for giving the robot a moment to stabilize its heading between movements.
     *
     * @param speed         Desired speed of turn.
     * @param angle         Absolute Angle (in Degrees) relative to last gyro reset.
     * @param maxTurnSpeed  Maximum differential turn speed (range 0 to +1.0)
     * @param heading       Absolute Heading Angle (in Degrees) relative to last gyro reset.
     *                      0 = fwd. +ve is CCW from fwd. -ve is CW from forward.
     *                      If a relative angle is required, add/subtract from current heading.
     * @param holdTime      Length of time (in seconds) to hold the specified heading.
     */
    public void gyroHold( double speed, double angle, double holdTime) {
    public void holdHeading(double maxTurnSpeed, double heading, double holdTime) {

        ElapsedTime holdTimer = new ElapsedTime();
        holdTimer.reset();

        // keep looping while we have time remaining.
        holdTimer.reset();
        while (opModeIsActive() && (holdTimer.time() < holdTime)) {
            // Update telemetry & Allow time for other processes to run.
            onHeading(speed, angle, P_TURN_COEFF);
            telemetry.update();
            // Determine required steering to keep on heading
            turnSpeed = getSteeringCorrection(heading, P_TURN_GAIN);

            // Clip the speed to the maximum permitted value.
            turnSpeed = Range.clip(turnSpeed, -maxTurnSpeed, maxTurnSpeed);

            // Pivot in place by applying the turning correction
            moveRobot(0, turnSpeed);

            // Display drive status for the driver.
            sendTelemetry(false);
        }

        // Stop all motion;
        leftDrive.setPower(0);
        rightDrive.setPower(0);
        moveRobot(0, 0);
    }

    // **********  LOW Level driving functions.  ********************

    /**
     * This method uses a Proportional Controller to determine how much steering correction is required.
     *
     * @param desiredHeading    The desired absolute heading (relative to last heading reset)
     * @param proportionalGain  Gain factor applied to heading error to obtain turning power.
     * @return                  Turning power needed to get to required heading.
     */
    public double getSteeringCorrection(double desiredHeading, double proportionalGain) {
        targetHeading = desiredHeading;  // Save for telemetry

        // Get the robot heading by applying an offset to the IMU heading
        robotHeading = getRawHeading() - headingOffset;

        // Determine the current heading error
        headingError = targetHeading - robotHeading;

        // Normalize the error to be within +/- 180 degrees
        while (headingError > 180)  headingError -= 360;
        while (headingError <= -180) headingError += 360;

        // Multiply the error by the gain to determine the required steering correction.  Limit the result to +/- 1.0
        return Range.clip(headingError * proportionalGain, -1, 1);
    }

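    // A wrap-around check of the normalization above (illustrative numbers): if targetHeading = +170
    // and robotHeading = -170, the raw error is 340 degrees, but the short way around is only -20;
    // the while-loops reduce 340 to 340 - 360 = -20, and with P_TURN_GAIN = 0.02 the method
    // returns Range.clip(-20 * 0.02, -1, 1) = -0.4, i.e. a clockwise correction.
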
    /**
     * Perform one cycle of closed loop heading control.
     *
     * @param speed   Desired speed of turn.
     * @param angle   Absolute Angle (in Degrees) relative to last gyro reset.
     *                0 = fwd. +ve is CCW from fwd. -ve is CW from forward.
     *                If a relative angle is required, add/subtract from current heading.
     * @param PCoeff  Proportional Gain coefficient
     * @return
     * This method takes separate drive (fwd/rev) and turn (right/left) requests,
     * combines them, and applies the appropriate speed commands to the left and right wheel motors.
     * @param drive  forward motor speed
     * @param turn   clockwise turning motor speed.
     */
    boolean onHeading(double speed, double angle, double PCoeff) {
        double   error;
        double   steer;
        boolean  onTarget = false;
        double   leftSpeed;
        double   rightSpeed;
    public void moveRobot(double drive, double turn) {
        driveSpeed = drive;     // save this value as a class member so it can be used by telemetry.
        turnSpeed  = turn;      // save this value as a class member so it can be used by telemetry.

        // determine turn power based on +/- error
        error = getError(angle);
        leftSpeed  = drive - turn;
        rightSpeed = drive + turn;

        if (Math.abs(error) <= HEADING_THRESHOLD) {
            steer = 0.0;
            leftSpeed  = 0.0;
            rightSpeed = 0.0;
            onTarget = true;
        }
        else {
            steer = getSteer(error, PCoeff);
            rightSpeed = speed * steer;
            leftSpeed  = -rightSpeed;
        // Scale speeds down if either one exceeds +/- 1.0;
        double max = Math.max(Math.abs(leftSpeed), Math.abs(rightSpeed));
        if (max > 1.0)
        {
            leftSpeed /= max;
            rightSpeed /= max;
        }

        // Send desired speeds to motors.
        leftDrive.setPower(leftSpeed);
        rightDrive.setPower(rightSpeed);

        // Display it for the driver.
        telemetry.addData("Target/Current", "%5.2f / %5.0f", angle, robotHeading);
        telemetry.addData("Error/Steer", "%5.2f / %5.2f", error, steer);
        telemetry.addData("Speed.", "%5.2f : %5.2f", leftSpeed, rightSpeed);

        return onTarget;
    }

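    // Quick numeric check of the drive/turn mixing above (illustrative): moveRobot(0.4, 0.3)
    // yields leftSpeed = 0.4 - 0.3 = 0.1 and rightSpeed = 0.4 + 0.3 = 0.7, so the robot arcs
    // left while still moving forward; neither value exceeds 1.0, so the normalization step
    // leaves both unchanged.
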
    /**
     * getError determines the error between the target angle and the robot's current heading
     * @param   targetAngle  Desired angle (relative to global reference established at last Gyro Reset).
     * @return  error angle: Degrees in the range +/- 180. Centered on the robot's frame of reference
     *          +ve error means the robot should turn LEFT (CCW) to reduce error.
     * Display the various control parameters while driving
     *
     * @param straight  Set to true if we are driving straight, and the encoder positions should be included in the telemetry.
     */
    public double getError(double targetAngle) {
    private void sendTelemetry(boolean straight) {

        double robotError;
        if (straight) {
            telemetry.addData("Motion", "Drive Straight");
            telemetry.addData("Target Pos L:R", "%7d:%7d", leftTarget, rightTarget);
            telemetry.addData("Actual Pos L:R", "%7d:%7d", leftDrive.getCurrentPosition(),
                    rightDrive.getCurrentPosition());
        } else {
            telemetry.addData("Motion", "Turning");
        }

        // calculate error in -179 to +180 range
        robotError = targetAngle - getHeading();
        while (robotError > 180)  robotError -= 360;
        while (robotError <= -180) robotError += 360;
        return robotError;
        telemetry.addData("Angle Target:Current", "%5.2f:%5.0f", targetHeading, robotHeading);
        telemetry.addData("Error:Steer", "%5.1f:%5.1f", headingError, turnSpeed);
        telemetry.addData("Wheel Speeds L:R.", "%5.2f : %5.2f", leftSpeed, rightSpeed);
        telemetry.update();
    }

    /**
     * read and save the current robot heading
     * read the raw (un-offset Gyro heading) directly from the IMU
     */
    public double getHeading() {
        robotHeading = (double)gyro.getIntegratedZValue();
        return robotHeading;
    public double getRawHeading() {
        Orientation angles = imu.getAngularOrientation(AxesReference.INTRINSIC, AxesOrder.ZYX, AngleUnit.DEGREES);
        return angles.firstAngle;
    }

    /**
     * returns desired steering force.  +/- 1 range.  +ve = steer left
     * @param error   Error angle in robot relative degrees
     * @param PCoeff  Proportional Gain Coefficient
     * @return
     * Reset the "offset" heading back to zero
     */
    public double getSteer(double error, double PCoeff) {
        return Range.clip(error * PCoeff, -1, 1);
    public void resetHeading() {
        // Save a new heading offset equal to the current raw heading.
        headingOffset = getRawHeading();
        robotHeading = 0;
    }

}

@@ -0,0 +1,293 @@
/* Copyright (c) 2023 FIRST. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted (subject to the limitations in the disclaimer below) provided that
 * the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this list
 * of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice, this
 * list of conditions and the following disclaimer in the documentation and/or
 * other materials provided with the distribution.
 *
 * Neither the name of FIRST nor the names of its contributors may be used to endorse or
 * promote products derived from this software without specific prior written permission.
 *
 * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
 * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

package org.firstinspires.ftc.robotcontroller.external.samples;

import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.util.Range;

import org.firstinspires.ftc.robotcore.external.hardware.camera.BuiltinCameraDirection;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.robotcore.external.hardware.camera.controls.ExposureControl;
import org.firstinspires.ftc.robotcore.external.hardware.camera.controls.GainControl;
import org.firstinspires.ftc.vision.VisionPortal;
import org.firstinspires.ftc.vision.apriltag.AprilTagDetection;
import org.firstinspires.ftc.vision.apriltag.AprilTagProcessor;

import java.util.List;
import java.util.concurrent.TimeUnit;

/**
 * This OpMode illustrates using a camera to locate and drive towards a specific AprilTag.
 * The code assumes a Holonomic (Mecanum or X Drive) Robot.
 *
 * The drive goal is to rotate to keep the Tag centered in the camera, while strafing to be directly in front of the tag, and
 * driving towards the tag to achieve the desired distance.
 * To reduce any motion blur (which will interrupt the detection process) the Camera exposure is reduced to a very low value (6 mS)
 * You can determine the best Exposure and Gain values by using the ConceptAprilTagOptimizeExposure OpMode in this Samples folder.
 *
 * The code assumes a Robot Configuration with motors named: leftfront_drive and rightfront_drive, leftback_drive and rightback_drive.
 * The motor directions must be set so a positive power goes forward on all wheels.
 * This sample assumes that the current game AprilTag Library (usually for the current season) is being loaded by default,
 * so you should choose to approach a valid tag ID (usually starting at 0)
 *
 * Under manual control, the left stick will move forward/back & left/right.  The right stick will rotate the robot.
 * Manually drive the robot until it displays Target data on the Driver Station.
 *
 * Press and hold the *Left Bumper* to enable the automatic "Drive to target" mode.
 * Release the Left Bumper to return to manual driving mode.
 *
 * Under "Drive To Target" mode, the robot has three goals:
 * 1) Turn the robot to always keep the Tag centered on the camera frame. (Use the Target Bearing to turn the robot.)
 * 2) Strafe the robot towards the centerline of the Tag, so it approaches directly in front of the tag. (Use the Target Yaw to strafe the robot)
 * 3) Drive towards the Tag to get to the desired distance.  (Use Tag Range to drive the robot forward/backward)
 *
 * Use DESIRED_DISTANCE to set how close you want the robot to get to the target.
 * Speed and Turn sensitivity can be adjusted using the SPEED_GAIN, STRAFE_GAIN and TURN_GAIN constants.
 *
 * Use Android Studio to Copy this Class, and Paste it into the TeamCode/src/main/java/org/firstinspires/ftc/teamcode folder.
 * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
 *
 */

@TeleOp(name="Omni Drive To AprilTag", group = "Concept")
|
||||
@Disabled
|
||||
public class RobotAutoDriveToAprilTagOmni extends LinearOpMode
|
||||
{
|
||||
// Adjust these numbers to suit your robot.
|
||||
final double DESIRED_DISTANCE = 12.0; // this is how close the camera should get to the target (inches)
|
||||
|
||||
// Set the GAIN constants to control the relationship between the measured position error, and how much power is
|
||||
// applied to the drive motors to correct the error.
|
||||
// Drive = Error * Gain Make these values smaller for smoother control, or larger for a more aggressive response.
|
||||
final double SPEED_GAIN = 0.02 ; // Forward Speed Control "Gain". eg: Ramp up to 50% power at a 25 inch error. (0.50 / 25.0)
|
||||
final double STRAFE_GAIN = 0.015 ; // Strafe Speed Control "Gain". eg: Ramp up to 25% power at a 25 degree Yaw error. (0.25 / 25.0)
|
||||
final double TURN_GAIN = 0.01 ; // Turn Control "Gain". eg: Ramp up to 25% power at a 25 degree error. (0.25 / 25.0)
|
||||
|
||||
final double MAX_AUTO_SPEED = 0.5; // Clip the approach speed to this max value (adjust for your robot)
|
||||
final double MAX_AUTO_STRAFE= 0.5; // Clip the approach speed to this max value (adjust for your robot)
|
||||
final double MAX_AUTO_TURN = 0.3; // Clip the turn speed to this max value (adjust for your robot)
|
||||
|
||||
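    // One illustrative control cycle with the gains above (hypothetical tag data): a tag seen at
    // range 37", bearing +10 deg, yaw -8 deg, with DESIRED_DISTANCE = 12" gives
    //   rangeError   = 37 - 12 = 25 -> drive  = clip(25 * 0.02, -0.5, 0.5)     = 0.50 (at the speed limit)
    //   headingError = 10           -> turn   = clip(10 * 0.01, -0.3, 0.3)     = 0.10
    //   yawError     = -8           -> strafe = clip(-(-8) * 0.015, -0.5, 0.5) = 0.12
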
    private DcMotor leftFrontDrive   = null;  //  Used to control the left front drive wheel
    private DcMotor rightFrontDrive  = null;  //  Used to control the right front drive wheel
    private DcMotor leftBackDrive    = null;  //  Used to control the left back drive wheel
    private DcMotor rightBackDrive   = null;  //  Used to control the right back drive wheel

    private static final boolean USE_WEBCAM = true;  // Set true to use a webcam, or false for a phone camera
    private static final int DESIRED_TAG_ID = 0;     // Choose the tag you want to approach or set to -1 for ANY tag.
    private VisionPortal visionPortal;               // Used to manage the video source.
    private AprilTagProcessor aprilTag;              // Used for managing the AprilTag detection process.
    private AprilTagDetection desiredTag = null;     // Used to hold the data for a detected AprilTag

    @Override public void runOpMode()
    {
        boolean targetFound = false;    // Set to true when an AprilTag target is detected
        double  drive       = 0;        // Desired forward power/speed (-1 to +1)
        double  strafe      = 0;        // Desired strafe power/speed (-1 to +1)
        double  turn        = 0;        // Desired turning power/speed (-1 to +1)

        // Initialize the Apriltag Detection process
        initAprilTag();

        // Initialize the hardware variables. Note that the strings used here as parameters
        // to 'get' must match the names assigned during the robot configuration step
        // (using the FTC Robot Controller app on the phone).
        leftFrontDrive  = hardwareMap.get(DcMotor.class, "leftfront_drive");
        rightFrontDrive = hardwareMap.get(DcMotor.class, "rightfront_drive");
        leftBackDrive   = hardwareMap.get(DcMotor.class, "leftback_drive");
        rightBackDrive  = hardwareMap.get(DcMotor.class, "rightback_drive");

        // To drive forward, most robots need the motor on one side to be reversed, because the axles point in opposite directions.
        // When run, this OpMode should start both motors driving forward. So adjust these two lines based on your first test drive.
        // Note: The settings here assume direct drive on left and right wheels.  Gear Reduction or 90 Deg drives may require direction flips
        leftFrontDrive.setDirection(DcMotor.Direction.REVERSE);
        leftBackDrive.setDirection(DcMotor.Direction.REVERSE);
        rightFrontDrive.setDirection(DcMotor.Direction.FORWARD);
        rightBackDrive.setDirection(DcMotor.Direction.FORWARD);

        if (USE_WEBCAM)
            setManualExposure(6, 250);  // Use low exposure time to reduce motion blur

        // Wait for driver to press start
        telemetry.addData("Camera preview on/off", "3 dots, Camera Stream");
        telemetry.addData(">", "Touch Play to start OpMode");
        telemetry.update();
        waitForStart();

        while (opModeIsActive())
        {
            targetFound = false;
            desiredTag  = null;

            // Step through the list of detected tags and look for a matching tag
            List<AprilTagDetection> currentDetections = aprilTag.getDetections();
            for (AprilTagDetection detection : currentDetections) {
                if ((detection.metadata != null)
                        && ((DESIRED_TAG_ID < 0) || (detection.id == DESIRED_TAG_ID))  ){
                    targetFound = true;
                    desiredTag = detection;
                    break;  // don't look any further.
                }
            }
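            // Match-test semantics: (DESIRED_TAG_ID < 0) accepts the first decoded tag of any ID
            // (the -1 "wildcard" case); otherwise only detection.id == DESIRED_TAG_ID matches.
            // For example, DESIRED_TAG_ID = 5 sets targetFound only for tag 5, while tags that are
            // not in the current library (metadata == null) are always skipped.
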
            // Tell the driver what we see, and what to do.
            if (targetFound) {
                telemetry.addData(">","HOLD Left-Bumper to Drive to Target\n");
                telemetry.addData("Target", "ID %d (%s)", desiredTag.id, desiredTag.metadata.name);
                telemetry.addData("Range",  "%5.1f inches", desiredTag.ftcPose.range);
                telemetry.addData("Bearing","%3.0f degrees", desiredTag.ftcPose.bearing);
                telemetry.addData("Yaw","%3.0f degrees", desiredTag.ftcPose.yaw);
            } else {
                telemetry.addData(">","Drive using joystick to find target\n");
            }

            // If Left Bumper is being pressed, AND we have found the desired target, Drive to target Automatically.
            if (gamepad1.left_bumper && targetFound) {

                // Determine heading, range and Yaw (tag image rotation) error so we can use them to control the robot automatically.
                double  rangeError   = (desiredTag.ftcPose.range - DESIRED_DISTANCE);
                double  headingError = desiredTag.ftcPose.bearing;
                double  yawError     = desiredTag.ftcPose.yaw;

                // Use the speed and turn "gains" to calculate how we want the robot to move.
                drive  = Range.clip(rangeError * SPEED_GAIN, -MAX_AUTO_SPEED, MAX_AUTO_SPEED);
                turn   = Range.clip(headingError * TURN_GAIN, -MAX_AUTO_TURN, MAX_AUTO_TURN);
                strafe = Range.clip(-yawError * STRAFE_GAIN, -MAX_AUTO_STRAFE, MAX_AUTO_STRAFE);

                telemetry.addData("Auto","Drive %5.2f, Strafe %5.2f, Turn %5.2f ", drive, strafe, turn);
            } else {

                // drive using manual POV Joystick mode.  Slow things down to make the robot more controllable.
                drive  = -gamepad1.left_stick_y  / 2.0;  // Reduce drive rate to 50%.
                strafe = -gamepad1.left_stick_x  / 2.0;  // Reduce strafe rate to 50%.
                turn   = -gamepad1.right_stick_x / 3.0;  // Reduce turn rate to 33%.
                telemetry.addData("Manual","Drive %5.2f, Strafe %5.2f, Turn %5.2f ", drive, strafe, turn);
            }
            telemetry.update();

            // Apply desired axes motions to the drivetrain.
            moveRobot(drive, strafe, turn);
            sleep(10);
        }
    }

    /**
     * Move robot according to desired axes motions
     * Positive X is forward
     * Positive Y is strafe left
     * Positive Yaw is counter-clockwise
     */
    public void moveRobot(double x, double y, double yaw) {
        // Calculate wheel powers.
        double leftFrontPower    =  x -y -yaw;
        double rightFrontPower   =  x +y +yaw;
        double leftBackPower     =  x +y -yaw;
        double rightBackPower    =  x -y +yaw;

        // Normalize wheel powers to be less than 1.0
        double max = Math.max(Math.abs(leftFrontPower), Math.abs(rightFrontPower));
        max = Math.max(max, Math.abs(leftBackPower));
        max = Math.max(max, Math.abs(rightBackPower));

        if (max > 1.0) {
            leftFrontPower /= max;
            rightFrontPower /= max;
            leftBackPower /= max;
            rightBackPower /= max;
        }

        // Send powers to the wheels.
        leftFrontDrive.setPower(leftFrontPower);
        rightFrontDrive.setPower(rightFrontPower);
        leftBackDrive.setPower(leftBackPower);
        rightBackDrive.setPower(rightBackPower);
    }

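    // Numeric check of the wheel equations above (illustrative request): moveRobot(0.5, 0.5, 0)
    // asks for equal forward and left-strafe motion with no rotation, giving
    //   leftFront = 0.5-0.5-0 = 0.0,   rightFront = 0.5+0.5+0 = 1.0,
    //   leftBack  = 0.5+0.5-0 = 1.0,   rightBack  = 0.5-0.5+0 = 0.0,
    // i.e. only the rightFront/leftBack diagonal is powered, sliding the robot diagonally forward-left.
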
    /**
     * Initialize the AprilTag processor.
     */
    private void initAprilTag() {
        // Create the AprilTag processor by using a builder.
        aprilTag = new AprilTagProcessor.Builder().build();

        // Create the vision portal by using a builder.
        if (USE_WEBCAM) {
            visionPortal = new VisionPortal.Builder()
                    .setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"))
                    .addProcessor(aprilTag)
                    .build();
        } else {
            visionPortal = new VisionPortal.Builder()
                    .setCamera(BuiltinCameraDirection.BACK)
                    .addProcessor(aprilTag)
                    .build();
        }
    }

    /*
     Manually set the camera gain and exposure.
     This can only be called AFTER calling initAprilTag(), and only works for Webcams;
    */
    private void setManualExposure(int exposureMS, int gain) {
        // Wait for the camera to be open, then use the controls

        if (visionPortal == null) {
            return;
        }

        // Make sure camera is streaming before we try to set the exposure controls
        if (visionPortal.getCameraState() != VisionPortal.CameraState.STREAMING) {
            telemetry.addData("Camera", "Waiting");
            telemetry.update();
            while (!isStopRequested() && (visionPortal.getCameraState() != VisionPortal.CameraState.STREAMING)) {
                sleep(20);
            }
            telemetry.addData("Camera", "Ready");
            telemetry.update();
        }

        // Set camera controls unless we are stopping.
        if (!isStopRequested())
        {
            ExposureControl exposureControl = visionPortal.getCameraControl(ExposureControl.class);
            if (exposureControl.getMode() != ExposureControl.Mode.Manual) {
                exposureControl.setMode(ExposureControl.Mode.Manual);
                sleep(50);
            }
            exposureControl.setExposure((long)exposureMS, TimeUnit.MILLISECONDS);
            sleep(20);
            GainControl gainControl = visionPortal.getCameraControl(GainControl.class);
            gainControl.setGain(gain);
            sleep(20);
        }
    }
}

@@ -0,0 +1,271 @@
/* Copyright (c) 2023 FIRST. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted (subject to the limitations in the disclaimer below) provided that
 * the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this list
 * of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice, this
 * list of conditions and the following disclaimer in the documentation and/or
 * other materials provided with the distribution.
 *
 * Neither the name of FIRST nor the names of its contributors may be used to endorse or
 * promote products derived from this software without specific prior written permission.
 *
 * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
 * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

package org.firstinspires.ftc.robotcontroller.external.samples;

import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.util.Range;

import org.firstinspires.ftc.robotcore.external.hardware.camera.BuiltinCameraDirection;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.robotcore.external.hardware.camera.controls.ExposureControl;
import org.firstinspires.ftc.robotcore.external.hardware.camera.controls.GainControl;
import org.firstinspires.ftc.vision.VisionPortal;
import org.firstinspires.ftc.vision.apriltag.AprilTagDetection;
import org.firstinspires.ftc.vision.apriltag.AprilTagProcessor;

import java.util.List;
import java.util.concurrent.TimeUnit;

/**
 * This OpMode illustrates using a camera to locate and drive towards a specific AprilTag.
 * The code assumes a basic two-wheel (Tank) Robot Drivetrain
 *
 * The driving goal is to rotate to keep the tag centered in the camera, while driving towards the tag to achieve the desired distance.
 * To reduce any motion blur (which will interrupt the detection process) the Camera exposure is reduced to a very low value (6 mS)
 * You can determine the best exposure and gain values by using the ConceptAprilTagOptimizeExposure OpMode in this Samples folder.
 *
 * The code assumes a Robot Configuration with motors named left_drive and right_drive.
 * The motor directions must be set so a positive power goes forward on both wheels;
 * This sample assumes that the default AprilTag Library (usually for the current season) is being loaded by default
 * so you should choose to approach a valid tag ID (usually starting at 0)
 *
 * Under manual control, the left stick will move forward/back, and the right stick will rotate the robot.
 * This is called POV Joystick mode, different from Tank Drive (where each joystick controls a wheel).
 *
 * Manually drive the robot until it displays Target data on the Driver Station.
 * Press and hold the *Left Bumper* to enable the automatic "Drive to target" mode.
 * Release the Left Bumper to return to manual driving mode.
 *
 * Under "Drive To Target" mode, the robot has two goals:
 * 1) Turn the robot to always keep the Tag centered on the camera frame. (Use the Target Bearing to turn the robot.)
 * 2) Drive towards the Tag to get to the desired distance.  (Use Tag Range to drive the robot forward/backward)
 *
 * Use DESIRED_DISTANCE to set how close you want the robot to get to the target.
 * Speed and Turn sensitivity can be adjusted using the SPEED_GAIN and TURN_GAIN constants.
 *
 * Use Android Studio to Copy this Class, and Paste it into the TeamCode/src/main/java/org/firstinspires/ftc/teamcode folder.
 * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
 *
 */

@TeleOp(name="Tank Drive To AprilTag", group = "Concept")
@Disabled
public class RobotAutoDriveToAprilTagTank extends LinearOpMode
{
    // Adjust these numbers to suit your robot.
    final double DESIRED_DISTANCE = 12.0; //  this is how close the camera should get to the target (inches)

    //  Set the GAIN constants to control the relationship between the measured position error, and how much power is
    //  applied to the drive motors to correct the error.
    //  Drive = Error * Gain.  Make these values smaller for smoother control, or larger for a more aggressive response.
    final double SPEED_GAIN = 0.02 ;   //  Speed Control "Gain". eg: Ramp up to 50% power at a 25 inch error.   (0.50 / 25.0)
    final double TURN_GAIN  = 0.01 ;   //  Turn Control "Gain".  eg: Ramp up to 25% power at a 25 degree error. (0.25 / 25.0)

    final double MAX_AUTO_SPEED = 0.5;   //  Clip the approach speed to this max value (adjust for your robot)
    final double MAX_AUTO_TURN  = 0.25;  //  Clip the turn speed to this max value (adjust for your robot)

    private DcMotor leftDrive  = null;  //  Used to control the left drive wheel
    private DcMotor rightDrive = null;  //  Used to control the right drive wheel

    private static final boolean USE_WEBCAM = true;  // Set true to use a webcam, or false for a phone camera
    private static final int DESIRED_TAG_ID = 0;     // Choose the tag you want to approach or set to -1 for ANY tag.
    private VisionPortal visionPortal;               // Used to manage the video source.
    private AprilTagProcessor aprilTag;              // Used for managing the AprilTag detection process.
    private AprilTagDetection desiredTag = null;     // Used to hold the data for a detected AprilTag

    @Override public void runOpMode()
    {
        boolean targetFound = false;    // Set to true when an AprilTag target is detected
        double  drive       = 0;        // Desired forward power/speed (-1 to +1)  +ve is forward
        double  turn        = 0;        // Desired turning power/speed (-1 to +1)  +ve is CounterClockwise

        // Initialize the Apriltag Detection process
        initAprilTag();

        // Initialize the hardware variables. Note that the strings used here as parameters
        // to 'get' must match the names assigned during the robot configuration step
        // (using the FTC Robot Controller app on the phone).
        leftDrive  = hardwareMap.get(DcMotor.class, "left_drive");
        rightDrive = hardwareMap.get(DcMotor.class, "right_drive");

        // To drive forward, most robots need the motor on one side to be reversed because the axles point in opposite directions.
        // When run, this OpMode should start both motors driving forward. So adjust these two lines based on your first test drive.
        // Note: The settings here assume direct drive on left and right wheels.  Single Gear Reduction or 90 Deg drives may require direction flips
        leftDrive.setDirection(DcMotor.Direction.REVERSE);
        rightDrive.setDirection(DcMotor.Direction.FORWARD);

        if (USE_WEBCAM)
            setManualExposure(6, 250);  // Use low exposure time to reduce motion blur

        // Wait for the driver to press Start
        telemetry.addData("Camera preview on/off", "3 dots, Camera Stream");
        telemetry.addData(">", "Touch Play to start OpMode");
        telemetry.update();
        waitForStart();

        while (opModeIsActive())
        {
            targetFound = false;
            desiredTag  = null;

            // Step through the list of detected tags and look for a matching tag
            List<AprilTagDetection> currentDetections = aprilTag.getDetections();
            for (AprilTagDetection detection : currentDetections) {
                if ((detection.metadata != null)
                        && ((DESIRED_TAG_ID < 0) || (detection.id == DESIRED_TAG_ID))  ){
                    targetFound = true;
                    desiredTag = detection;
                    break;  // don't look any further.
                }
            }

            // Tell the driver what we see, and what to do.
            if (targetFound) {
                telemetry.addData(">","HOLD Left-Bumper to Drive to Target\n");
                telemetry.addData("Target", "ID %d (%s)", desiredTag.id, desiredTag.metadata.name);
                telemetry.addData("Range",  "%5.1f inches", desiredTag.ftcPose.range);
                telemetry.addData("Bearing","%3.0f degrees", desiredTag.ftcPose.bearing);
            } else {
                telemetry.addData(">","Drive using joystick to find target\n");
            }

            // If Left Bumper is being pressed, AND we have found the desired target, Drive to target Automatically.
            if (gamepad1.left_bumper && targetFound) {

                // Determine heading and range error so we can use them to control the robot automatically.
                double  rangeError   = (desiredTag.ftcPose.range - DESIRED_DISTANCE);
                double  headingError = desiredTag.ftcPose.bearing;

                // Use the speed and turn "gains" to calculate how we want the robot to move.  Clip it to the maximum
                drive = Range.clip(rangeError * SPEED_GAIN, -MAX_AUTO_SPEED, MAX_AUTO_SPEED);
                turn  = Range.clip(headingError * TURN_GAIN, -MAX_AUTO_TURN, MAX_AUTO_TURN);

                telemetry.addData("Auto","Drive %5.2f, Turn %5.2f", drive, turn);
            } else {

                // drive using manual POV Joystick mode.
                drive = -gamepad1.left_stick_y  / 2.0;  // Reduce drive rate to 50%.
                turn  = -gamepad1.right_stick_x / 4.0;  // Reduce turn rate to 25%.
                telemetry.addData("Manual","Drive %5.2f, Turn %5.2f", drive, turn);
            }
            telemetry.update();

            // Apply desired axes motions to the drivetrain.
            moveRobot(drive, turn);
            sleep(10);
        }
    }

    /**
     * Move robot according to desired axes motions
     * Positive X is forward
     * Positive Yaw is counter-clockwise
     */
    public void moveRobot(double x, double yaw) {
        // Calculate left and right wheel powers.
        double leftPower  = x - yaw;
        double rightPower = x + yaw;

        // Normalize wheel powers to be less than 1.0
        double max = Math.max(Math.abs(leftPower), Math.abs(rightPower));
        if (max > 1.0) {
            leftPower /= max;
            rightPower /= max;
        }

        // Send powers to the wheels.
        leftDrive.setPower(leftPower);
        rightDrive.setPower(rightPower);
    }

    /**
     * Initialize the AprilTag processor.
     */
    private void initAprilTag() {
        // Create the AprilTag processor by using a builder.
        aprilTag = new AprilTagProcessor.Builder().build();

        // Create the vision portal by using a builder.
        if (USE_WEBCAM) {
            visionPortal = new VisionPortal.Builder()
                    .setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"))
                    .addProcessor(aprilTag)
                    .build();
        } else {
            visionPortal = new VisionPortal.Builder()
                    .setCamera(BuiltinCameraDirection.BACK)
                    .addProcessor(aprilTag)
                    .build();
        }
    }

    /*
     Manually set the camera gain and exposure.
     This can only be called AFTER calling initAprilTag(), and only works for Webcams;
    */
    private void setManualExposure(int exposureMS, int gain) {
        // Wait for the camera to be open, then use the controls

        if (visionPortal == null) {
            return;
        }

        // Make sure camera is streaming before we try to set the exposure controls
        if (visionPortal.getCameraState() != VisionPortal.CameraState.STREAMING) {
            telemetry.addData("Camera", "Waiting");
            telemetry.update();
            while (!isStopRequested() && (visionPortal.getCameraState() != VisionPortal.CameraState.STREAMING)) {
                sleep(20);
            }
            telemetry.addData("Camera", "Ready");
            telemetry.update();
        }

        // Set camera controls unless we are stopping.
        if (!isStopRequested())
        {
            ExposureControl exposureControl = visionPortal.getCameraControl(ExposureControl.class);
            if (exposureControl.getMode() != ExposureControl.Mode.Manual) {
                exposureControl.setMode(ExposureControl.Mode.Manual);
                sleep(50);
            }
            exposureControl.setExposure((long)exposureMS, TimeUnit.MILLISECONDS);
            sleep(20);
            GainControl gainControl = visionPortal.getCameraControl(GainControl.class);
            gainControl.setGain(gain);
            sleep(20);
            telemetry.addData("Camera", "Ready");
            telemetry.update();
        }
    }
}
@ -90,8 +90,8 @@ public class RobotAutoDriveToLine_Linear extends LinearOpMode {
        rightDrive.setDirection(DcMotor.Direction.FORWARD);

        // If there are encoders connected, switch to RUN_USING_ENCODER mode for greater accuracy
        // robot.leftDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
        // robot.rightDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
        // leftDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
        // rightDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);

        // Get a reference to our sensor object. It's recommended to use NormalizedColorSensor over
        // ColorSensor, because NormalizedColorSensor consistently gives values between 0 and 1, while
@ -0,0 +1,167 @@
/* Copyright (c) 2022 FIRST. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted (subject to the limitations in the disclaimer below) provided that
 * the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this list
 * of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice, this
 * list of conditions and the following disclaimer in the documentation and/or
 * other materials provided with the distribution.
 *
 * Neither the name of FIRST nor the names of its contributors may be used to endorse or
 * promote products derived from this software without specific prior written permission.
 *
 * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
 * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

package org.firstinspires.ftc.robotcontroller.external.samples;

import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.hardware.Servo;
import com.qualcomm.robotcore.util.Range;

/**
 * This file works in conjunction with the External Hardware Class sample called: ConceptExternalHardwareClass.java
 * Please read the explanations in that Sample about how to use this class definition.
 *
 * This file defines a Java Class that performs all the setup and configuration for a sample robot's hardware (motors and sensors).
 * It assumes three motors (left_drive, right_drive and arm) and two servos (left_hand and right_hand)
 *
 * This one file/class can be used by ALL of your OpModes without having to cut & paste the code each time.
 *
 * Where possible, the actual hardware objects are "abstracted" (or hidden) so the OpMode code just makes calls into the class,
 * rather than accessing the internal hardware directly. This is why the objects are declared "private".
 *
 * Use Android Studio to Copy this Class, and Paste it into your team's code folder with *exactly the same name*.
 *
 * Or, in OnBot Java, add a new file named RobotHardware.java, drawing from this Sample; select Not an OpMode.
 * Also add a new OpMode, drawing from the Sample ConceptExternalHardwareClass.java; select TeleOp.
 */
public class RobotHardware {

    /* Declare OpMode members. */
    private LinearOpMode myOpMode = null;   // gain access to methods in the calling OpMode.

    // Define Motor and Servo objects (Make them private so they can't be accessed externally)
    private DcMotor leftDrive  = null;
    private DcMotor rightDrive = null;
    private DcMotor armMotor   = null;
    private Servo   leftHand   = null;
    private Servo   rightHand  = null;

    // Define Drive constants. Make them public so they CAN be used by the calling OpMode
    public static final double MID_SERVO      =  0.5;
    public static final double HAND_SPEED     =  0.02;  // sets rate to move servo
    public static final double ARM_UP_POWER   =  0.45;
    public static final double ARM_DOWN_POWER = -0.45;

    // Define a constructor that allows the OpMode to pass a reference to itself.
    public RobotHardware (LinearOpMode opmode) {
        myOpMode = opmode;
    }

    /**
     * Initialize all the robot's hardware.
     * This method must be called ONCE when the OpMode is initialized.
     *
     * All of the hardware devices are accessed via the hardware map, and initialized.
     */
    public void init() {
        // Define and Initialize Motors (note: need to use reference to actual OpMode).
        leftDrive  = myOpMode.hardwareMap.get(DcMotor.class, "left_drive");
        rightDrive = myOpMode.hardwareMap.get(DcMotor.class, "right_drive");
        armMotor   = myOpMode.hardwareMap.get(DcMotor.class, "arm");

        // To drive forward, most robots need the motor on one side to be reversed, because the axles point in opposite directions.
        // Pushing the left stick forward MUST make robot go forward. So adjust these two lines based on your first test drive.
        // Note: The settings here assume direct drive on left and right wheels. Gear Reduction or 90 Deg drives may require direction flips
        leftDrive.setDirection(DcMotor.Direction.REVERSE);
        rightDrive.setDirection(DcMotor.Direction.FORWARD);

        // If there are encoders connected, switch to RUN_USING_ENCODER mode for greater accuracy
        // leftDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
        // rightDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);

        // Define and initialize ALL installed servos.
        leftHand  = myOpMode.hardwareMap.get(Servo.class, "left_hand");
        rightHand = myOpMode.hardwareMap.get(Servo.class, "right_hand");
        leftHand.setPosition(MID_SERVO);
        rightHand.setPosition(MID_SERVO);

        myOpMode.telemetry.addData(">", "Hardware Initialized");
        myOpMode.telemetry.update();
    }

    /**
     * Calculates the left/right motor powers required to achieve the requested
     * robot motions: Drive (Axial motion) and Turn (Yaw motion).
     * Then sends these power levels to the motors.
     *
     * @param Drive     Fwd/Rev driving power (-1.0 to 1.0) +ve is forward
     * @param Turn      Right/Left turning power (-1.0 to 1.0) +ve is CW
     */
    public void driveRobot(double Drive, double Turn) {
        // Combine drive and turn for blended motion.
        double left  = Drive + Turn;
        double right = Drive - Turn;

        // Scale the values so neither exceed +/- 1.0
        double max = Math.max(Math.abs(left), Math.abs(right));
        if (max > 1.0)
        {
            left /= max;
            right /= max;
        }

        // Use existing function to drive both wheels.
        setDrivePower(left, right);
    }

    /**
     * Pass the requested wheel motor powers to the appropriate hardware drive motors.
     *
     * @param leftWheel     Fwd/Rev driving power (-1.0 to 1.0) +ve is forward
     * @param rightWheel    Fwd/Rev driving power (-1.0 to 1.0) +ve is forward
     */
    public void setDrivePower(double leftWheel, double rightWheel) {
        // Output the values to the motor drives.
        leftDrive.setPower(leftWheel);
        rightDrive.setPower(rightWheel);
    }

    /**
     * Pass the requested arm power to the appropriate hardware drive motor
     *
     * @param power driving power (-1.0 to 1.0)
     */
    public void setArmPower(double power) {
        armMotor.setPower(power);
    }

    /**
     * Send the two hand-servos to opposing (mirrored) positions, based on the passed offset.
     *
     * @param offset distance each servo moves away from MID_SERVO (clipped to +/- 0.5)
     */
    public void setHandPositions(double offset) {
        offset = Range.clip(offset, -0.5, 0.5);
        leftHand.setPosition(MID_SERVO + offset);
        rightHand.setPosition(MID_SERVO - offset);
    }
}
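To illustrate how an OpMode consumes this class, here is a minimal hypothetical sketch (not part of the SDK samples; the class name and gamepad mappings are illustrative assumptions, and ConceptExternalHardwareClass.java remains the official companion sample):

import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;

@TeleOp(name = "Sketch: RobotHardware Usage", group = "Robot")
public class SketchRobotHardwareUsage extends LinearOpMode {
    // Pass this OpMode into the hardware class so it can reach hardwareMap and telemetry.
    RobotHardware robot = new RobotHardware(this);

    @Override public void runOpMode() {
        robot.init();   // Map and configure all motors and servos once, before Start.
        waitForStart();

        while (opModeIsActive()) {
            // Blend axial and yaw stick inputs; driveRobot() normalizes and applies the powers.
            robot.driveRobot(-gamepad1.left_stick_y, gamepad1.right_stick_x);

            // Mirror the two hand servos around MID_SERVO, using the trigger as the offset.
            robot.setHandPositions(gamepad1.right_trigger / 2.0);
        }
    }
}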
@ -86,8 +86,8 @@ public class RobotTeleopPOV_Linear extends LinearOpMode {
        rightDrive.setDirection(DcMotor.Direction.FORWARD);

        // If there are encoders connected, switch to RUN_USING_ENCODER mode for greater accuracy
        // robot.leftDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
        // robot.rightDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
        // leftDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
        // rightDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);

        // Define and initialize ALL installed servos.
        leftClaw = hardwareMap.get(Servo.class, "left_hand");
@ -96,7 +96,7 @@ public class RobotTeleopPOV_Linear extends LinearOpMode {
        rightClaw.setPosition(MID_SERVO);

        // Send telemetry message to signify robot waiting;
        telemetry.addData("Say", "Hello Driver");
        telemetry.addData(">", "Robot Ready. Press Play.");
        telemetry.update();

        // Wait for the game to start (driver presses PLAY)
@ -84,8 +84,8 @@ public class RobotTeleopTank_Iterative extends OpMode{
        rightDrive.setDirection(DcMotor.Direction.FORWARD);

        // If there are encoders connected, switch to RUN_USING_ENCODER mode for greater accuracy
        // robot.leftDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
        // robot.rightDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
        // leftDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);
        // rightDrive.setMode(DcMotor.RunMode.RUN_USING_ENCODER);

        // Define and initialize ALL installed servos.
        leftClaw = hardwareMap.get(Servo.class, "left_hand");
@ -94,7 +94,7 @@ public class RobotTeleopTank_Iterative extends OpMode{
        rightClaw.setPosition(MID_SERVO);

        // Send telemetry message to signify robot waiting;
        telemetry.addData("Say", "Hello Driver");
        telemetry.addData(">", "Robot Ready. Press Play.");
    }

    /*
@ -49,13 +49,16 @@ import java.util.Locale;
/**
 * {@link SensorBNO055IMU} gives a short demo on how to use the BNO055 Inertial Motion Unit (IMU) from AdaFruit.
 *
 * Note: this is a Legacy example that will not work with newer Control/Expansion Hubs that use a different IMU.
 * Please use the new SensorIMUOrthogonal or SensorIMUNonOrthogonal samples for a more universal IMU interface.
 *
 * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
 * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
 *
 * @see <a href="http://www.adafruit.com/products/2472">Adafruit IMU</a>
 */
@TeleOp(name = "Sensor: BNO055 IMU", group = "Sensor")
@Disabled // Comment this out to add to the OpMode list
public class SensorBNO055IMU extends LinearOpMode
{
    //----------------------------------------------------------------------------------------------
@ -50,6 +50,9 @@ import java.util.Locale;
 * {@link SensorBNO055IMUCalibration} calibrates the IMU accelerometer per
 * "Section 3.11 Calibration" of the BNO055 specification.
 *
 * Note: this is a Legacy example that will not work with newer Control/Expansion Hubs that use a different IMU.
 * Please use the new SensorIMUOrthogonal or SensorIMUNonOrthogonal samples for a more universal IMU interface.
 *
 * <p>Manual calibration of the IMU is definitely NOT necessary: except for the magnetometer
 * (which is not used by the default {@link BNO055IMU.SensorMode#IMU
 * SensorMode#IMU}), the BNO055 is internally self-calibrating and thus can be very successfully
@ -0,0 +1,183 @@
/* Copyright (c) 2022 FIRST. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted (subject to the limitations in the disclaimer below) provided that
 * the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this list
 * of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice, this
 * list of conditions and the following disclaimer in the documentation and/or
 * other materials provided with the distribution.
 *
 * Neither the name of FIRST nor the names of its contributors may be used to endorse or
 * promote products derived from this software without specific prior written permission.
 *
 * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
 * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

package org.firstinspires.ftc.robotcontroller.external.samples;

import static com.qualcomm.hardware.rev.RevHubOrientationOnRobot.xyzOrientation;

import com.qualcomm.hardware.rev.RevHubOrientationOnRobot;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.IMU;

import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.robotcore.external.navigation.AngularVelocity;
import org.firstinspires.ftc.robotcore.external.navigation.Orientation;
import org.firstinspires.ftc.robotcore.external.navigation.YawPitchRollAngles;

/**
 * {@link SensorIMUNonOrthogonal} shows how to use the new universal {@link IMU} interface. This
 * interface may be used with the BNO055 IMU or the BHI260 IMU. It assumes that an IMU is configured
 * on the robot with the name "imu".
 * <p>
 * The sample will display the current Yaw, Pitch and Roll of the robot.<br>
 * With the correct orientation parameters selected, pitch/roll/yaw should act as follows:
 * <p>
 * Pitch value should INCREASE as the robot is tipped UP at the front. (Rotation about X) <br>
 * Roll value should INCREASE as the robot is tipped UP at the left side. (Rotation about Y) <br>
 * Yaw value should INCREASE as the robot is rotated Counter Clockwise. (Rotation about Z) <br>
 * <p>
 * The yaw can be reset (to zero) by pressing the Y button on the gamepad (Triangle on a PS4 controller)
 * <p>
 * This specific sample DOES NOT assume that the Hub is mounted on one of the three orthogonal
 * planes (X/Y, X/Z or Y/Z) OR that the Hub has only been rotated in a range of 90 degree increments.
 * <p>
 * Note: if your Hub is mounted Orthogonally (on an orthogonal surface, angled at some multiple of
 * 90 Degrees) then you should use the simpler SensorImuOrthogonal sample in this folder.
 * <p>
 * But... If your Hub is mounted Non-Orthogonally, you must specify one or more rotational angles
 * that transform a "Default" Hub orientation into your desired orientation. That is what is
 * illustrated here.
 * <p>
 * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
 * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
 * <p>
 * Finally, edit this OpMode to use at least one angle around an axis to orient your Hub.
 */
@TeleOp(name = "Sensor: IMU Non-Orthogonal", group = "Sensor")
@Disabled // Comment this out to add to the OpMode list
public class SensorIMUNonOrthogonal extends LinearOpMode
{
    // The IMU sensor object
    IMU imu;

    //----------------------------------------------------------------------------------------------
    // Main logic
    //----------------------------------------------------------------------------------------------

    @Override public void runOpMode() throws InterruptedException {

        // Retrieve and initialize the IMU.
        // This sample expects the IMU to be in a REV Hub and named "imu".
        imu = hardwareMap.get(IMU.class, "imu");

        /* Define how the hub is mounted to the robot to get the correct Yaw, Pitch and Roll values.
         *
         * You can apply up to three axis rotations to orient your Hub according to how it's mounted on the robot.
         *
         * The starting point for these rotations is the "Default" Hub orientation, which is:
         * 1) Hub laying flat on a horizontal surface, with the Printed Logo facing UP
         * 2) Rotated such that the USB ports are facing forward on the robot.
         *
         * The order that the rotations are performed matters, so this sample shows doing them in the order X, Y, then Z.
         * For specifying non-orthogonal hub mounting orientations, we must temporarily use axes
         * defined relative to the Hub itself, instead of the usual Robot Coordinate System axes
         * used for the results the IMU gives us. In the starting orientation, the Hub axes are
         * aligned with the Robot Coordinate System:
         *
         * X Axis:  Starting at Center of Hub, pointing out towards I2C connectors
         * Y Axis:  Starting at Center of Hub, pointing out towards USB connectors
         * Z Axis:  Starting at Center of Hub, pointing Up through LOGO
         *
         * Positive rotation is defined by right-hand rule with thumb pointing in +ve direction on axis.
         *
         * Some examples.
         *
         * ----------------------------------------------------------------------------------------------------------------------------------
         * Example A) Assume that the hub is mounted on a sloped plate at the back of the robot, with the USB ports coming out the top of the hub.
         *            The plate is tilted UP 60 degrees from horizontal.
         *
         *            To get the "Default" hub into this configuration you would just need a single rotation.
         *            1) Rotate the Hub +60 degrees around the X axis to tilt up the front edge.
         *            2) No rotation around the Y or Z axes.
         *
         *            So the X,Y,Z rotations would be 60,0,0
         *
         * ----------------------------------------------------------------------------------------------------------------------------------
         * Example B) Assume that the hub is laying flat on the chassis, but it has been twisted 30 degrees towards the right front wheel to make
         *            the USB cable accessible.
         *
         *            To get the "Default" hub into this configuration you would just need a single rotation, but around a different axis.
         *            1) No rotation around the X or Y axes.
         *            2) Rotate the Hub -30 degrees (Clockwise) around the Z axis, since a positive angle would be Counter Clockwise.
         *
         *            So the X,Y,Z rotations would be 0,0,-30
         *
         * ----------------------------------------------------------------------------------------------------------------------------------
         * Example C) Assume that the hub is mounted on a vertical plate on the right side of the robot, with the Logo facing out, and the
         *            Hub rotated so that the USB ports are facing down 30 degrees towards the back wheels of the robot.
         *
         *            To get the "Default" hub into this configuration will require several rotations.
         *            1) Rotate the hub +90 degrees around the X axis to get it standing upright with the logo pointing backwards on the robot
         *            2) Next, rotate the hub +90 around the Y axis to get it facing to the right.
         *            3) Finally rotate the hub +120 degrees around the Z axis to take the USB ports from vertical to sloping down 30 degrees and
         *               facing towards the back of the robot.
         *
         *            So the X,Y,Z rotations would be 90,90,120
         */

        // The next three lines define the desired axis rotations.
        // To Do: EDIT these values to match YOUR mounting configuration.
        double xRotation = 0;  // enter the desired X rotation angle here.
        double yRotation = 0;  // enter the desired Y rotation angle here.
        double zRotation = 0;  // enter the desired Z rotation angle here.

        Orientation hubRotation = xyzOrientation(xRotation, yRotation, zRotation);

        // Now initialize the IMU with this mounting orientation
        RevHubOrientationOnRobot orientationOnRobot = new RevHubOrientationOnRobot(hubRotation);
        imu.initialize(new IMU.Parameters(orientationOnRobot));

        // Loop and update the dashboard
        while (!isStopRequested()) {
            telemetry.addData("Hub orientation", "X=%.1f, Y=%.1f, Z=%.1f \n", xRotation, yRotation, zRotation);

            // Check to see if heading reset is requested
            if (gamepad1.y) {
                telemetry.addData("Yaw", "Resetting\n");
                imu.resetYaw();
            } else {
                telemetry.addData("Yaw", "Press Y (triangle) on Gamepad to reset\n");
            }

            // Retrieve Rotational Angles and Velocities
            YawPitchRollAngles orientation = imu.getRobotYawPitchRollAngles();
            AngularVelocity angularVelocity = imu.getRobotAngularVelocity(AngleUnit.DEGREES);

            telemetry.addData("Yaw (Z)", "%.2f Deg. (Heading)", orientation.getYaw(AngleUnit.DEGREES));
            telemetry.addData("Pitch (X)", "%.2f Deg.", orientation.getPitch(AngleUnit.DEGREES));
            telemetry.addData("Roll (Y)", "%.2f Deg.\n", orientation.getRoll(AngleUnit.DEGREES));
            telemetry.addData("Yaw (Z) velocity", "%.2f Deg/Sec", angularVelocity.zRotationRate);
            telemetry.addData("Pitch (X) velocity", "%.2f Deg/Sec", angularVelocity.xRotationRate);
            telemetry.addData("Roll (Y) velocity", "%.2f Deg/Sec", angularVelocity.yRotationRate);
            telemetry.update();
        }
    }
}
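As a worked illustration of Example C above, the three rotation values would be entered like this (a sketch; only these lines change, and the values come straight from the Example C walkthrough in the comment):

        // Example C values: Hub on a vertical plate on the right side of the robot,
        // logo facing out, USB ports sloping 30 degrees down toward the back wheels.
        double xRotation = 90;   // stand the Hub upright (logo pointing backwards)
        double yRotation = 90;   // face the logo to the right
        double zRotation = 120;  // slope the USB ports down 30 degrees toward the back

        Orientation hubRotation = xyzOrientation(xRotation, yRotation, zRotation);
        imu.initialize(new IMU.Parameters(new RevHubOrientationOnRobot(hubRotation)));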
@ -0,0 +1,145 @@
/* Copyright (c) 2022 FIRST. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted (subject to the limitations in the disclaimer below) provided that
 * the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this list
 * of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice, this
 * list of conditions and the following disclaimer in the documentation and/or
 * other materials provided with the distribution.
 *
 * Neither the name of FIRST nor the names of its contributors may be used to endorse or
 * promote products derived from this software without specific prior written permission.
 *
 * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
 * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

package org.firstinspires.ftc.robotcontroller.external.samples;

import com.qualcomm.hardware.rev.RevHubOrientationOnRobot;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.IMU;

import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.robotcore.external.navigation.AngularVelocity;
import org.firstinspires.ftc.robotcore.external.navigation.YawPitchRollAngles;

/**
 * {@link SensorIMUOrthogonal} shows how to use the new universal {@link IMU} interface. This
 * interface may be used with the BNO055 IMU or the BHI260 IMU. It assumes that an IMU is configured
 * on the robot with the name "imu".
 * <p>
 * The sample will display the current Yaw, Pitch and Roll of the robot.<br>
 * With the correct orientation parameters selected, pitch/roll/yaw should act as follows:
 * <p>
 * Pitch value should INCREASE as the robot is tipped UP at the front. (Rotation about X) <br>
 * Roll value should INCREASE as the robot is tipped UP at the left side. (Rotation about Y) <br>
 * Yaw value should INCREASE as the robot is rotated Counter Clockwise. (Rotation about Z) <br>
 * <p>
 * The yaw can be reset (to zero) by pressing the Y button on the gamepad (Triangle on a PS4 controller)
 * <p>
 * This specific sample assumes that the Hub is mounted on one of the three orthogonal planes
 * (X/Y, X/Z or Y/Z) and that the Hub has only been rotated in a range of 90 degree increments.
 * <p>
 * Note: if your Hub is mounted on a surface angled at some non-90 Degree multiple (like 30) look at
 * the alternative SensorImuNonOrthogonal sample in this folder.
 * <p>
 * This "Orthogonal" requirement means that:
 * <p>
 * 1) The Logo printed on the top of the Hub can ONLY be pointing in one of six directions:
 *    FORWARD, BACKWARD, UP, DOWN, LEFT and RIGHT.
 * <p>
 * 2) The USB ports can only be pointing in one of the same six directions:<br>
 *    FORWARD, BACKWARD, UP, DOWN, LEFT and RIGHT.
 * <p>
 * So, to fully define how your Hub is mounted to the robot, you must simply specify:<br>
 * logoFacingDirection<br>
 * usbFacingDirection
 * <p>
 * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
 * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
 * <p>
 * Finally, choose the two correct parameters to define how your Hub is mounted and edit this OpMode
 * to use those parameters.
 */
@TeleOp(name = "Sensor: IMU Orthogonal", group = "Sensor")
@Disabled // Comment this out to add to the OpMode list
public class SensorIMUOrthogonal extends LinearOpMode
{
    // The IMU sensor object
    IMU imu;

    //----------------------------------------------------------------------------------------------
    // Main logic
    //----------------------------------------------------------------------------------------------

    @Override public void runOpMode() throws InterruptedException {

        // Retrieve and initialize the IMU.
        // This sample expects the IMU to be in a REV Hub and named "imu".
        imu = hardwareMap.get(IMU.class, "imu");

        /* Define how the hub is mounted on the robot to get the correct Yaw, Pitch and Roll values.
         *
         * Two input parameters are required to fully specify the Orientation.
         * The first parameter specifies the direction the printed logo on the Hub is pointing.
         * The second parameter specifies the direction the USB connector on the Hub is pointing.
         * All directions are relative to the robot, and left/right is as-viewed from behind the robot.
         */

        /* The next two lines define Hub orientation.
         * The Default Orientation (shown) is when a hub is mounted horizontally with the printed logo pointing UP and the USB port pointing FORWARD.
         *
         * To Do: EDIT these two lines to match YOUR mounting configuration.
         */
        RevHubOrientationOnRobot.LogoFacingDirection logoDirection = RevHubOrientationOnRobot.LogoFacingDirection.UP;
        RevHubOrientationOnRobot.UsbFacingDirection usbDirection = RevHubOrientationOnRobot.UsbFacingDirection.FORWARD;

        RevHubOrientationOnRobot orientationOnRobot = new RevHubOrientationOnRobot(logoDirection, usbDirection);

        // Now initialize the IMU with this mounting orientation
        // Note: if you choose two conflicting directions, this initialization will cause a code exception.
        imu.initialize(new IMU.Parameters(orientationOnRobot));

        // Loop and update the dashboard
        while (!isStopRequested()) {

            telemetry.addData("Hub orientation", "Logo=%s USB=%s\n ", logoDirection, usbDirection);

            // Check to see if heading reset is requested
            if (gamepad1.y) {
                telemetry.addData("Yaw", "Resetting\n");
                imu.resetYaw();
            } else {
                telemetry.addData("Yaw", "Press Y (triangle) on Gamepad to reset\n");
            }

            // Retrieve Rotational Angles and Velocities
            YawPitchRollAngles orientation = imu.getRobotYawPitchRollAngles();
            AngularVelocity angularVelocity = imu.getRobotAngularVelocity(AngleUnit.DEGREES);

            telemetry.addData("Yaw (Z)", "%.2f Deg. (Heading)", orientation.getYaw(AngleUnit.DEGREES));
            telemetry.addData("Pitch (X)", "%.2f Deg.", orientation.getPitch(AngleUnit.DEGREES));
            telemetry.addData("Roll (Y)", "%.2f Deg.\n", orientation.getRoll(AngleUnit.DEGREES));
            telemetry.addData("Yaw (Z) velocity", "%.2f Deg/Sec", angularVelocity.zRotationRate);
            telemetry.addData("Pitch (X) velocity", "%.2f Deg/Sec", angularVelocity.xRotationRate);
            telemetry.addData("Roll (Y) velocity", "%.2f Deg/Sec", angularVelocity.yRotationRate);
            telemetry.update();
        }
    }
}
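For a different orthogonal mounting, only those two direction constants change. For instance (an illustrative assumption, not taken from the sample), a Hub mounted on a vertical plate on the left side of the robot, with the logo facing left and the USB ports pointing up, might be described as:

        RevHubOrientationOnRobot.LogoFacingDirection logoDirection = RevHubOrientationOnRobot.LogoFacingDirection.LEFT;
        RevHubOrientationOnRobot.UsbFacingDirection usbDirection = RevHubOrientationOnRobot.UsbFacingDirection.UP;

        // Conflicting directions (such as logo UP with USB UP) cause initialization to throw an exception.
        imu.initialize(new IMU.Parameters(new RevHubOrientationOnRobot(logoDirection, usbDirection)));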
@ -0,0 +1,129 @@
/*
 * Copyright (c) 2023 FIRST
 *
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without modification,
 * are permitted (subject to the limitations in the disclaimer below) provided that
 * the following conditions are met:
 *
 * Redistributions of source code must retain the above copyright notice, this list
 * of conditions and the following disclaimer.
 *
 * Redistributions in binary form must reproduce the above copyright notice, this
 * list of conditions and the following disclaimer in the documentation and/or
 * other materials provided with the distribution.
 *
 * Neither the name of FIRST nor the names of its contributors may be used to
 * endorse or promote products derived from this software without specific prior
 * written permission.
 *
 * NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
 * LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
 * THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
 * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
 * TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

package org.firstinspires.ftc.robotcontroller.external.samples;

import android.util.Size;

import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;

import org.firstinspires.ftc.robotcore.external.hardware.camera.BuiltinCameraDirection;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.vision.VisionPortal;

import java.util.Locale;

/**
 * This Utility OpMode helps calibrate a webcam or RC phone camera, useful for AprilTag pose estimation
 * with the FTC VisionPortal. It captures a camera frame (image) and stores it on the Robot Controller
 * (Control Hub or RC phone), with each press of the gamepad button X (or Square).
 * Full calibration instructions are here:
 *
 * https://ftc-docs.firstinspires.org/camera-calibration
 *
 * In Android Studio, copy this class into your "teamcode" folder with a new name.
 * Remove or comment out the @Disabled line to add this file to the Driver Station OpMode list.
 *
 * In OnBot Java, use "Add File" to add this file from the list of Samples.
 */
@TeleOp(name = "Utility: Camera Frame Capture", group = "Utility")
@Disabled
public class UtilityCameraFrameCapture extends LinearOpMode
{
    /*
     * EDIT THESE PARAMETERS AS NEEDED
     */
    final boolean USING_WEBCAM = false;
    final BuiltinCameraDirection INTERNAL_CAM_DIR = BuiltinCameraDirection.BACK;
    final int RESOLUTION_WIDTH = 640;
    final int RESOLUTION_HEIGHT = 480;

    // Internal state
    boolean lastX;
    int frameCount;
    long capReqTime;

    @Override
    public void runOpMode()
    {
        VisionPortal portal;

        if (USING_WEBCAM)
        {
            portal = new VisionPortal.Builder()
                    .setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"))
                    .setCameraResolution(new Size(RESOLUTION_WIDTH, RESOLUTION_HEIGHT))
                    .build();
        }
        else
        {
            portal = new VisionPortal.Builder()
                    .setCamera(INTERNAL_CAM_DIR)
                    .setCameraResolution(new Size(RESOLUTION_WIDTH, RESOLUTION_HEIGHT))
                    .build();
        }

        while (!isStopRequested())
        {
            boolean x = gamepad1.x;

            // Rising-edge detect the X button, so holding it down captures only one frame.
            if (x && !lastX)
            {
                portal.saveNextFrameRaw(String.format(Locale.US, "CameraFrameCapture-%06d", frameCount++));
                capReqTime = System.currentTimeMillis();
            }

            lastX = x;

            telemetry.addLine("######## Camera Capture Utility ########");
            telemetry.addLine(String.format(Locale.US, " > Resolution: %dx%d", RESOLUTION_WIDTH, RESOLUTION_HEIGHT));
            telemetry.addLine(" > Press X (or Square) to capture a frame");
            telemetry.addData(" > Camera Status", portal.getCameraState());

            // Show a confirmation banner for one second after each capture request.
            if (capReqTime != 0)
            {
                telemetry.addLine("\nCaptured Frame!");
            }

            if (capReqTime != 0 && System.currentTimeMillis() - capReqTime > 1000)
            {
                capReqTime = 0;
            }

            telemetry.update();
        }
    }
}
@ -37,14 +37,9 @@ Concept: This is a sample OpMode that illustrates performing a specific function
Each OpMode should try to only demonstrate a single concept so they are easy to
locate based on their name. These OpModes may not produce a drivable robot.

Library: This is a class, or set of classes used to implement some strategy.
These will typically NOT implement a full OpMode. Instead they will be included
by an OpMode to provide some stand-alone capability.

After the prefix, other conventions will apply:

* Sensor class names are constructed as: Sensor - Company - Type
* Robot class names are constructed as: Robot - Mode - Action - OpModetype
* Concept class names are constructed as: Concept - Topic - OpModetype
* Library class names are constructed as: Library - Topic - OpModetype
@ -5,45 +5,35 @@ This document defines the FTC Sample OpMode and Class conventions.

### OpMode Name

A range of different samples classes will reside in the java/external/samples folder.
To gain a better understanding of how the samples are organized, and how to interpret the
naming system, it will help to understand the conventions that were used during their creation.

For ease of understanding, the class names will follow a naming convention which indicates
the purpose of each class. The prefix of the name will be one of the following:
To summarize: A range of different samples classes will reside in the java/external/samples.
The class names will follow a naming convention which indicates the purpose of each class.
The prefix of the name will be one of the following:

Basic: This is a minimally functional OpMode used to illustrate the skeleton/structure
of a particular style of OpMode. These are bare bones Tank Drive examples.
Basic: This is a minimally functional OpMode used to illustrate the skeleton/structure
of a particular style of OpMode. These are bare bones examples.

Sensor: This is a Sample OpMode that shows how to use a specific sensor.
It is not intended to drive a functioning robot, it is simply showing the minimal code
required to read and display the sensor values.

Hardware: This is not an actual OpMode, but a helper class that is used to describe
one particular robot's hardware configuration: eg: For the K9 or Robot.
Look at any Robot sample to see how this can be used in an OpMode.
Teams can copy one of these to create their own robot definition.

Robot: This is a Sample OpMode that uses the Robot robot hardware as a base.
It may be used to provide some standard baseline Robot opmodes, or
to demonstrate how a particular sensor or concept can be used directly on the
Robot chassis.
Robot: This is a Sample OpMode that assumes a simple two-motor (differential) drive base.
It may be used to provide a common baseline driving OpMode, or
to demonstrate how a particular sensor or concept can be used to navigate.

Concept: This is a sample OpMode that illustrates performing a specific function or concept.
These may be complex, but their operation should be explained clearly in the comments,
or the comments should reference an external doc, guide or tutorial.
Each OpMode should try to only demonstrate a single concept so they are easy to
locate based on their name.

Library: This is a class, or set of classes used to implement some strategy.
These will typically NOT implement a full OpMode. Instead they will be included
by an OpMode to provide some stand-alone capability.
locate based on their name. These OpModes may not produce a drivable robot.

After the prefix, other conventions will apply:

* Sensor class names should be constructed as: Sensor - Company - Type
* Hardware class names should be constructed as: Hardware - Robot type
* Robot class names should be constructed as: Robot - Mode - Action - OpModetype
* Concept class names should be constructed as: Concept - Topic - OpModetype
* Library class names should be constructed as: Library - Topic - OpModetype
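As concrete illustrations of this convention, drawn from samples appearing elsewhere in this changeset: RobotAutoDriveToLine_Linear parses as Robot - Auto - DriveToLine - Linear, and ConceptAprilTagOptimizeExposure parses as Concept - AprilTagOptimizeExposure.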

### Sample OpMode Content/Style

@ -306,9 +306,9 @@ public class FtcRobotControllerActivity extends Activity
    preferencesHelper.writeBooleanPrefIfDifferent(context.getString(R.string.pref_rc_connected), true);
    preferencesHelper.getSharedPreferences().registerOnSharedPreferenceChangeListener(sharedPreferencesListener);

    // Check if this RC app is from a later FTC season that what was installed previously
    // Check if this RC app is from a later FTC season than what was installed previously
    int ftcSeasonYearOfPreviouslyInstalledRc = preferencesHelper.readInt(getString(R.string.pref_ftc_season_year_of_current_rc), 0);
    int ftcSeasonYearOfCurrentlyInstalledRc = AppUtil.getInstance().getFtcSeasonYear(YearMonth.now()).getValue();
    int ftcSeasonYearOfCurrentlyInstalledRc = AppUtil.getInstance().getFtcSeasonYear(AppUtil.getInstance().getLocalSdkBuildMonth()).getValue();
    if (ftcSeasonYearOfCurrentlyInstalledRc > ftcSeasonYearOfPreviouslyInstalledRc) {
      preferencesHelper.writeIntPrefIfDifferent(getString(R.string.pref_ftc_season_year_of_current_rc), ftcSeasonYearOfCurrentlyInstalledRc);
      // Since it's a new FTC season, we should reset certain settings back to their default values.
@ -395,10 +395,9 @@ public class FtcRobotControllerActivity extends Activity
    readNetworkType();
    ServiceController.startService(FtcRobotControllerWatchdogService.class);
    bindToService();
    logPackageVersions();
    logDeviceSerialNumber();
    AndroidBoard.getInstance().logAndroidBoardInfo();
    RobotLog.logAppInfo();
    RobotLog.logDeviceInfo();
    AndroidBoard.getInstance().logAndroidBoardInfo();

    if (preferencesHelper.readBoolean(getString(R.string.pref_wifi_automute), false)) {
      initWifiMute(true);
@ -499,19 +498,6 @@ public class FtcRobotControllerActivity extends Activity
    }
  }

  protected void logPackageVersions() {
    RobotLog.logBuildConfig(com.qualcomm.ftcrobotcontroller.BuildConfig.class);
    RobotLog.logBuildConfig(com.qualcomm.robotcore.BuildConfig.class);
    RobotLog.logBuildConfig(com.qualcomm.hardware.BuildConfig.class);
    RobotLog.logBuildConfig(com.qualcomm.ftccommon.BuildConfig.class);
    RobotLog.logBuildConfig(com.google.blocks.BuildConfig.class);
    RobotLog.logBuildConfig(org.firstinspires.inspection.BuildConfig.class);
  }

  protected void logDeviceSerialNumber() {
    RobotLog.ii(TAG, "Android device serial number: " + Device.getSerialNumberOrUnknown());
  }

  protected void readNetworkType() {
    // Control hubs are always running the access point model. Everything else, for the time
    // being always runs the Wi-Fi Direct model.
@ -639,8 +625,8 @@ public class FtcRobotControllerActivity extends Activity
  }

  /**
   * Updates the orientation of monitorContainer (which contains cameraMonitorView and
   * tfodMonitorView) based on the given configuration. Makes the children split the space.
   * Updates the orientation of monitorContainer (which contains cameraMonitorView)
   * based on the given configuration. Makes the children split the space.
   */
  private void updateMonitorLayout(Configuration configuration) {
    LinearLayout monitorContainer = (LinearLayout) findViewById(R.id.monitorContainer);

@ -140,13 +140,6 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
        android:layout_weight="1"
        android:orientation="vertical"
        />
    <FrameLayout
        android:id="@+id/tfodMonitorViewId"
        android:visibility="gone"
        android:layout_width="match_parent"
        android:layout_height="0dp"
        android:layout_weight="1"
        />

</LinearLayout>

@ -65,7 +65,7 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
    <item>@style/AppThemeTealRC</item>
  </integer-array>

  <string name="pref_ftc_season_year_of_current_rc">pref_ftc_season_year_of_current_rc</string>
  <string translatable="false" name="pref_ftc_season_year_of_current_rc">pref_ftc_season_year_of_current_rc_new</string>

  <string name="packageName">@string/packageNameRobotController</string>
200
README.md
@ -1,16 +1,21 @@
## NOTICE

This repository contains the public FTC SDK for the Freight Frenzy (2021-2022) competition season.
This repository contains the public FTC SDK for the POWERPLAY (2022-2023) competition season.

## Welcome!
This GitHub repository contains the source code that is used to build an Android app to control a *FIRST* Tech Challenge competition robot. To use this SDK, download/clone the entire project to your local computer.

## Requirements
To use this Android Studio project, you will need Android Studio 2021.2 (codename Chipmunk) or later.

To program your robot in Blocks or OnBot Java, you do not need Android Studio.

## Getting Started
If you are new to robotics or new to the *FIRST* Tech Challenge, then you should consider reviewing the [FTC Blocks Tutorial](https://github.com/FIRST-Tech-Challenge/FtcRobotController/wiki/Blocks-Tutorial) to get familiar with how to use the control system:
If you are new to robotics or new to the *FIRST* Tech Challenge, then you should consider reviewing the [FTC Blocks Tutorial](https://ftc-docs.firstinspires.org/programming_resources/blocks/Blocks-Tutorial.html) to get familiar with how to use the control system:

[FTC Blocks Online Tutorial](https://github.com/FIRST-Tech-Challenge/FtcRobotController/wiki/Blocks-Tutorial)
[FTC Blocks Online Tutorial](https://ftc-docs.firstinspires.org/programming_resources/blocks/Blocks-Tutorial.html)

Even if you are an advanced Java programmer, it is helpful to start with the [FTC Blocks tutorial](https://github.com/FIRST-Tech-Challenge/FtcRobotController/wiki/Blocks-Tutorial), and then migrate to the [OnBot Java Tool](https://github.com/FIRST-Tech-Challenge/FtcRobotController/wiki/OnBot-Java-Tutorial) or to [Android Studio](https://github.com/FIRST-Tech-Challenge/FtcRobotController/wiki/Android-Studio-Tutorial) afterwards.
Even if you are an advanced Java programmer, it is helpful to start with the [FTC Blocks tutorial](https://ftc-docs.firstinspires.org/programming_resources/blocks/Blocks-Tutorial.html), and then migrate to the [OnBot Java Tool](https://ftc-docs.firstinspires.org/programming_resources/onbot_java/OnBot-Java-Tutorial.html) or to [Android Studio](https://ftc-docs.firstinspires.org/programming_resources/android_studio_java/Android-Studio-Tutorial.html) afterwards.

## Downloading the Project
If you are an Android Studio programmer, there are several ways to download this repo. Note that if you use the Blocks or OnBot Java Tool to program your robot, then you do not need to download this repository.
@ -31,7 +36,7 @@ Once you have downloaded and uncompressed (if needed) your folder, you can use A
### User Documentation and Tutorials
*FIRST* maintains online documentation with information and tutorials on how to use the *FIRST* Tech Challenge software and robot control system. You can access this documentation using the following link:

[FtcRobotController Online Documentation](https://github.com/FIRST-Tech-Challenge/FtcRobotController/wiki)
[FIRST Tech Challenge Documentation](https://ftc-docs.firstinspires.org/index.html)

Note that the online documentation is an "evergreen" document that is constantly being updated and edited. It contains the most current information about the *FIRST* Tech Challenge software and control system.

@ -41,9 +46,9 @@ The Javadoc reference documentation for the FTC SDK is now available online. Cl
[FTC Javadoc Documentation](https://javadoc.io/doc/org.firstinspires.ftc)

### Online User Forum
For technical questions regarding the Control System or the FTC SDK, please visit the FTC Technology forum:
For technical questions regarding the Control System or the FTC SDK, please visit the FIRST Tech Challenge Community site:

[FTC Technology Forum](https://ftcforum.firstinspires.org/forum/ftc-technology)
[FIRST Tech Challenge Community](https://ftc-community.firstinspires.org/)

### Sample OpModes
This project contains a large selection of Sample OpModes (robot code examples) which can be cut and pasted into your /teamcode folder to be used as-is, or modified to suit your team's needs.
@ -54,10 +59,189 @@ The readme.md file located in the [/TeamCode/src/main/java/org/firstinspires/ftc

# Release Information

## Version 8.2 (20230707-131020)

### Breaking changes
* Non-linear (iterative) Op Modes are no longer allowed to manipulate actuators in their `stop()` method. Attempts to do so will be ignored and logged (see the sketch after this list).
  * When an Op Mode attempts to illegally manipulate an actuator, the Robot Controller will print a log message
    including the text `CANCELLED_FOR_SAFETY`.
  * Additionally, LinearOpModes are no longer able to regain the ability to manipulate actuators by removing their
    thread's interrupt or using another thread.
* Removes support for Android version 6.0 (Marshmallow). The minSdkVersion is now 24.
* Increases the Robocol version.
  * This means an 8.2 or later Robot Controller or Driver Station will not be able to communicate with an 8.1 or earlier Driver Station or Robot Controller.
  * If you forget to update both apps at the same time, an error message will be shown explaining which app is older and should be updated.
* FTC_FieldCoordinateSystemDefinition.pdf has been moved. It is still in the git history, but has been removed from the git snapshot corresponding with the 8.2 tag. The official version now lives at [Field Coordinate System](https://ftc-docs.firstinspires.org/field-coordinate-system).
* `LynxUsbDevice.addConfiguredModule()` and `LynxUsbDevice.getConfiguredModule()` have been replaced with `LynxUsbDevice.getOrAddModule()`.
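To make the first breaking change concrete, here is a minimal hedged sketch of the pattern that is no longer allowed in an iterative OpMode (the class fragment and member names are illustrative assumptions):

```java
// Hypothetical iterative OpMode fragment. As of 8.2, the setPower() call below
// is ignored and logged with CANCELLED_FOR_SAFETY instead of moving the motor.
@Override
public void stop() {
    leftDrive.setPower(0.5);  // Illegal actuator manipulation in stop(); will be cancelled.
}
```

Since the SDK shuts actuators down itself when an OpMode ends, explicit motor cleanup in `stop()` should not be needed.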
||||
|
||||
### New features
|
||||
* Adds new `VisionPortal` API for computer vision
|
||||
* **This API may be subject to change for final kickoff release!**
|
||||
* Several new samples added.
|
||||
* Adds support for detecting AprilTags.
|
||||
* `VisionPortal` is the new entry point for both AprilTag and TFOD processing.
|
||||
* Vuforia will be removed in a future release.
|
||||
* Updated TensorFlow dependencies.
|
||||
* Added support for webcam camera controls to blocks.
|
||||
* Previous blocks for Vuforia and TensorFlow Object Detection are obsolete.
|
||||
* Related documentation for associated technologies can be found at
|
||||
* [AprilTag Introduction](https://ftc-docs.firstinspires.org/apriltag-intro)
|
||||
* [AprilTag SDK Guide](https://ftc-docs.firstinspires.org/apriltag-sdk)
|
||||
* [AprilTag Detection Values](https://ftc-docs.firstinspires.org/apriltag-detection-values)
|
||||
* [AprilTag Test Images](https://ftc-docs.firstinspires.org/apriltag-test-images)
|
||||
* [Camera Calibration](https://ftc-docs.firstinspires.org/camera-calibration)
|
||||
* Adds Driver Station support for Logitech Dual Action and Sony PS5 DualSense gamepads.
|
||||
* This **does not** include support for the Sony PS5 DualSense Edge gamepad.
|
||||
* Always refer to Game Manual 1 to determine gamepad legality in competition.
|
||||
* Adds support for MJPEG payload streaming to UVC driver (external JPEG decompression routine required for use).
|
||||
* Shows a hint on the Driver Station UI about how to bind a gamepad when buttons are pressed or the sticks are moved on an unbound gamepad.
|
||||
* Adds option for fullscreening "Camera Stream" on Driver Station.
|
||||
* OnBotJava source code is automatically saved as a ZIP file on every build with a rolling window of the last 30 builds kept; allows recovering source code from previous builds if code is accidentally deleted or corrupted.
|
||||
* Adds support for changing the addresses of Expansion Hubs that are not connected directly via USB.
|
||||
* The Expansion Hub Address Change screen now has an Apply button that changes the addresses without leaving the screen.
|
||||
* Addresses that are assigned to other hubs connected to the same USB connection or Control Hub are no longer able to be selected.
|
||||
* Increases maximum size of Blocks inline comments to 100 characters
|
||||
* Saves position of open Blocks comment balloons
|
||||
* Adds new AprilTag Driving samples: RobotDriveToAprilTagTank & RobotDriveToAprilTagOmni
|
||||
* Adds Sample to illustrate optimizing camera exposure for AprilTags: ConceptAprilTagOptimizeExposure
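
A minimal sketch of the new entry point, patterned after the SDK's AprilTag samples. The webcam configuration name "Webcam 1" is an assumption, and since the API may still change before kickoff, treat this as illustrative only:

```java
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.vision.VisionPortal;
import org.firstinspires.ftc.vision.apriltag.AprilTagDetection;
import org.firstinspires.ftc.vision.apriltag.AprilTagProcessor;

@TeleOp(name = "AprilTagSketch")
public class AprilTagSketch extends LinearOpMode {
    @Override
    public void runOpMode() {
        // Build an AprilTag processor and a VisionPortal that feeds it camera frames.
        // "Webcam 1" is an assumed name; it must match your robot configuration.
        AprilTagProcessor aprilTag = AprilTagProcessor.easyCreateWithDefaults();
        VisionPortal portal = VisionPortal.easyCreateWithDefaults(
                hardwareMap.get(WebcamName.class, "Webcam 1"), aprilTag);

        waitForStart();
        while (opModeIsActive()) {
            // Report the range to every visible tag that is in the tag library.
            for (AprilTagDetection detection : aprilTag.getDetections()) {
                if (detection.metadata != null) {
                    telemetry.addData("Tag " + detection.id, "%.1f inches away",
                            detection.ftcPose.range);
                }
            }
            telemetry.update();
        }
        portal.close(); // release the camera when the Op Mode ends
    }
}
```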

### Bug Fixes
* Corrects the inspection screen to report the app version using the SDK version defined in the libraries instead of the version specified in `AndroidManifest.xml`. This corrects the case where the app could show matching version numbers to the user but still state that the versions did not match.
  * If the version specified in `AndroidManifest.xml` does not match the SDK version, an SDK version entry will be displayed on the Manage webpage.
* Fixes an issue where no error was displayed when saving a configuration file with a duplicate name from the Driver Station.
* Fixes a deadlock in the UVC driver which manifested in https://github.com/OpenFTC/EasyOpenCV/issues/57.
* Fixes a deadlock in the UVC driver that could occur when hot-plugging cameras.
* Fixes UVC driver compatibility with the Arducam OV9281 global-shutter camera.
* Fixes the Emergency Stop condition that occurred when an OnBotJava build produced duplicate Op Mode names.
* Fixes known causes of "Attempted use of a closed LynxModule instance" log spam.
* Fixes the visual identification LED pattern when configuring Expansion Hubs connected via RS-485.

## Version 8.1.1 (20221201-150726)

This is a bug-fix-only release that addresses the following four issues.

* [Issue #492](https://github.com/FIRST-Tech-Challenge/FtcRobotController/issues/492) - Can't create new blocks opmodes.
* [Issue #495](https://github.com/FIRST-Tech-Challenge/FtcRobotController/issues/495) - Remove the final modifier from the OpMode's Telemetry object.
* [Issue #500](https://github.com/FIRST-Tech-Challenge/FtcRobotController/issues/500) - Some devices cannot be configured when the Driver Station app has been updated to 8.1.
  * Updating either the Robot Controller app or the Driver Station app to 8.1.1 or later will fix this issue.
* The Modern Robotics touch sensor was configurable as a Digital Device. It can only be used as an Analog Device.

## Version 8.1 (20221121-115119)

### Breaking Changes
* Deprecates the `OpMode` fields `msStuckDetectInit`, `msStuckDetectInitLoop`, `msStuckDetectStart`, `msStuckDetectLoop`, and `msStuckDetectStop`.
  * Op Modes no longer have a time limit for `init()`, `init_loop()`, `start()` or `loop()`, so the fields corresponding to those methods are no longer used.
  * `stop()` still has a time limit, but it is now hardcoded to be 1 second, and cannot be changed using `msStuckDetectStop`.
* Deprecates the `OpMode` methods `internalPreInit()`, `internalPostInitLoop()`, and `internalPostLoop()`.
  * Iterative `OpMode`s will continue to call these methods in case they were overridden.
  * These methods will not be called at all for `LinearOpMode`s.
* Deprecates (and stops respecting) `DeviceProperties.xmlTagAliases`.

### Enhancements
* Adds a new `IMU` interface to Blocks and Java that can be used with both the original BNO055 IMU included in all older Control Hubs and Expansion Hubs, and the new alternative BHI260AP IMU (see the sketch after this list).
  * You can determine which type of IMU is in your Control Hub by navigating to the Manage page of the web interface.
  * To learn how to use the new `IMU` interface, see https://ftc-docs.firstinspires.org/programming_resources/imu/imu.html. The `SensorIMU` Blocks sample was also updated to use the new `IMU` interface, and the following Java samples were added:
    * `SensorIMUOrthogonal`
      * Use this sample if your REV Hub is mounted so that it is parallel or perpendicular to the bottom of your robot.
    * `SensorIMUNonOrthogonal`
      * Use this sample if your REV Hub is mounted to your robot in any other orientation.
    * `ConceptExploringIMUOrientations`
      * This Op Mode is a tool to help you understand how the orthogonal orientations work, and which one applies to your robot.
  * The BHI260AP IMU can only be accessed via the new `IMU` interface. The BNO055 IMU can be programmed using the new `IMU` interface, or you can continue to program it using the old `BNO055IMU` interface. If you want to be able to quickly switch to a new Control Hub that may contain the BHI260AP IMU, you should migrate your code to use the new `IMU` interface.
  * Unlike the old `BNO055IMU` interface, which only worked correctly when the REV Hub was mounted flat on your robot, the `IMU` interface allows you to specify the orientation of the REV Hub on your robot. It will account for this, and give you your orientation in a Robot Coordinate System, instead of a special coordinate system for the REV Hub. As a result, your pitch and yaw will be 0 when your *robot* is level, instead of when the REV Hub is level, which will result in much more reliable orientation angle values for most mounting orientations.
  * Because of the new robot-centric coordinate system, the pitch and roll angles returned by the `IMU` interface will be different from the ones returned by the `BNO055IMU` interface. When you are migrating your code, pay careful attention to the documentation.
  * If you have calibrated your BNO055, you can provide that calibration data to the new `IMU` interface by passing a `BNO055IMUNew.Parameters` instance to `IMU.initialize()`.
  * The `IMU` interface is also suitable for implementation by third-party vendors for IMUs that support providing the orientation in the form of a quaternion.
* Iterative `OpMode`s (as opposed to `LinearOpMode`s) now run on a dedicated thread.
  * Cycle times should not be as impacted by everything else going on in the system.
  * Slow `OpMode`s can no longer increase the amount of time it takes to process network commands, and vice versa.
  * The `init()`, `init_loop()`, `start()` and `loop()` methods no longer need to return within a certain time frame.
* BNO055 IMU legacy driver: restores the ability to initialize in one Op Mode, and then have another Op Mode re-use that initialization. This allows you to maintain the 0-yaw position between Op Modes, if desired.
* Allows customized versions of device drivers in the FTC SDK to use the same XML tag.
  * Before, if you wanted to customize a device driver, you had to copy it to a new class _and_ give it a new XML tag. Giving it a new XML tag meant that to switch which driver was being used, you had to modify your configuration file.
  * Now, to use your custom driver, all you have to do is specify your custom driver's class when calling `hardwareMap.get()`. To go back to the original driver, specify the original driver class. If you specify an interface that is implemented by both the original driver and the custom driver, there is no guarantee about which implementation will be returned.
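
To make the migration concrete, here is a minimal sketch of the new `IMU` interface in a LinearOpMode. It is an illustration, not one of the official samples listed above; it assumes the default Control Hub configuration name `imu` and a logo-up, USB-forward mounting, so substitute your own orientation:

```java
import com.qualcomm.hardware.rev.RevHubOrientationOnRobot;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.IMU;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.robotcore.external.navigation.YawPitchRollAngles;

@TeleOp(name = "ImuSketch")
public class ImuSketch extends LinearOpMode {
    @Override
    public void runOpMode() {
        // "imu" is the device name in the default Control Hub configuration.
        IMU imu = hardwareMap.get(IMU.class, "imu");

        // Tell the driver how the hub is mounted; angles are then reported in the
        // Robot Coordinate System. Logo-up/USB-forward is an assumption here.
        imu.initialize(new IMU.Parameters(new RevHubOrientationOnRobot(
                RevHubOrientationOnRobot.LogoFacingDirection.UP,
                RevHubOrientationOnRobot.UsbFacingDirection.FORWARD)));

        waitForStart();
        imu.resetYaw(); // optional: define the robot's current heading as 0

        while (opModeIsActive()) {
            YawPitchRollAngles angles = imu.getRobotYawPitchRollAngles();
            telemetry.addData("Yaw (deg)", angles.getYaw(AngleUnit.DEGREES));
            telemetry.update();
        }
    }
}
```

As noted above, a BNO055 with existing calibration data can use the same `initialize()` call with a `BNO055IMUNew.Parameters` instance instead.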

### Bug Fixes
* Fixes accessing the "Manage TensorFlow Lite Models" and "Manage Sounds" links and performing Blocks and OnBotJava Op Mode downloads from the REV Hardware Client.
* Fixes an issue where an I2C device driver would be auto-initialized using the parameters assigned in a previous Op Mode run.
* Improves Driver Station popup menu placement in the landscape layout.
* Fixes a NullPointerException when attempting to get a non-configured BNO055 IMU in a Blocks Op Mode on an RC phone.
* Fixes a problem with Blocks if a variable is named `orientation`.

## Version 8.0 (20220907-131644)

### Breaking Changes
* Increases the Robocol version.
  * This means an 8.0 or later Robot Controller or Driver Station will not be able to communicate with a 7.2 or earlier Driver Station or Robot Controller.
  * If you forget to update both apps at the same time, an error message will be shown explaining which app is older and should be updated.
* Initializing I2C devices now happens when you retrieve them from the `HardwareMap` for the first time.
  * Previously, all I2C devices would be initialized before the Op Mode even began executing, whether you were actually going to use them or not. This could result in reduced performance and unnecessary warnings.
  * With this change, it is very important for Java users to retrieve all needed devices from the `HardwareMap` **during the Init phase of the Op Mode**. Namely, declare a variable for each hardware device the Op Mode will use, and assign a value to each. Do not do this during the Run phase, or your Op Mode may briefly hang while the devices you are retrieving get initialized. (A sketch of this pattern follows this list.)
  * Op Modes that do not use all of the I2C devices specified in the configuration file should take less time to initialize. Op Modes that do use all of the specified I2C devices should take the same amount of time as previously.
* Fixes [issue #251](https://github.com/FIRST-Tech-Challenge/FtcRobotController/issues/251) by changing the order in which axis rotation rates are read from the angular velocity vector in the BNO055 IMU driver.
* Deprecates `pitchMode` in `BNO055IMU.Parameters`.
  * Setting `pitchMode` to `PitchMode.WINDOWS` would break the coordinate conventions used by the driver.
* Moves `OpModeManagerImpl` to the `com.qualcomm.robotcore.eventloop.opmode` package.
  * This breaks the third-party libraries EasyOpenCV (version 1.5.1 and earlier) and FTC Dashboard (version 0.4.4 and earlier).
* Deletes the deprecated `OpMode` method `resetStartTime()` (use `resetRuntime()` instead).
* Deletes the protected `LinearOpMode.LinearOpModeHelper` class (which was not meant for use by Op Modes).
* Removes the I2C Device (Synchronous) config type (deprecated since 2018).
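
A minimal sketch of the recommended retrieval pattern. The device names `left_drive` and `sensor_color` are assumptions; match them to your configuration file:

```java
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.hardware.ColorSensor;
import com.qualcomm.robotcore.hardware.DcMotor;

@Autonomous(name = "HardwareMapInitSketch")
public class HardwareMapInitSketch extends LinearOpMode {
    // Declare a variable for every device the Op Mode will use.
    private DcMotor leftDrive;
    private ColorSensor colorSensor;

    @Override
    public void runOpMode() {
        // Retrieve ALL devices here, during the Init phase. As of 8.0, I2C devices
        // are initialized on first retrieval, so doing this after waitForStart()
        // could briefly hang the Op Mode mid-run.
        leftDrive   = hardwareMap.get(DcMotor.class, "left_drive");
        colorSensor = hardwareMap.get(ColorSensor.class, "sensor_color");

        waitForStart();
        while (opModeIsActive()) {
            telemetry.addData("alpha", colorSensor.alpha());
            telemetry.update();
        }
    }
}
```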

### Enhancements
* Uncaught exceptions in Op Modes no longer require a Restart Robot.
  * A blue screen popping up with a stacktrace is not an SDK error; this replaces the red text in the telemetry area.
  * Since the very first SDK release, Op Mode crashes have put the robot into the "EMERGENCY STOP" state, only showing the first line of the exception, and requiring the user to press "Restart Robot" to continue.
  * Exceptions during an Op Mode now open a popup window with the same color scheme as the log viewer, containing 15 lines of the exception stacktrace to allow easily tracing down the offending line without needing to connect to view logs over ADB or scroll through large amounts of logs in the log viewer.
  * The exception text in the popup window is both zoomable and scrollable, just like a webpage.
  * Pressing the "OK" button in the popup window will return to the main screen of the Driver Station and allow an Op Mode to be run again immediately, without the need to perform a "Restart Robot".
* Adds a new Java sample to demonstrate using a hardware class to abstract robot actuators and share them across multiple Op Modes (a minimal sketch of the pattern follows this list).
  * The sample Op Mode is [ConceptExternalHardwareClass.java](FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/ConceptExternalHardwareClass.java).
  * The abstracted hardware class is [RobotHardware.java](FtcRobotController/src/main/java/org/firstinspires/ftc/robotcontroller/external/samples/RobotHardware.java).
* Updates the RobotAutoDriveByGyro_Linear Java sample to use the REV Control/Expansion Hub IMU.
* Updates the Vuforia samples to reference PowerPlay assets and use the correct names and field locations of the image targets.
* Updates the TensorFlow samples to reference PowerPlay assets.
* Adds opt-in support for Java 8 language features to the OnBotJava editor.
  * To opt in, open the OnBotJava Settings, and check `Enable beta Java 8 support`.
  * Note that Java 8 code will only compile when the Robot Controller runs Android 7.0 Nougat or later.
  * Please report issues [here](https://github.com/FIRST-Tech-Challenge/FtcRobotController/issues).
* In OnBotJava, clicking on build errors now jumps to the correct location.
* Improves OnBotJava autocomplete behavior, to provide better completion options in most cases.
* Adds a QR code to the Robot Controller Inspection Report when viewed from the Driver Station, for scanning by inspectors at competition.
* Improves I2C performance and reliability in some scenarios.
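
For reference, the pattern looks roughly like the following. This is a simplified sketch of the idea, not the actual RobotHardware.java sample, and the device names are assumptions:

```java
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.hardware.DcMotor;

// One class owns the hardware; each Op Mode constructs it and delegates to it.
public class DriveBase {
    private final LinearOpMode opMode;
    private DcMotor leftDrive;
    private DcMotor rightDrive;

    public DriveBase(LinearOpMode opMode) {
        this.opMode = opMode;
    }

    public void init() {
        // Device names are assumptions; match them to your configuration file.
        leftDrive  = opMode.hardwareMap.get(DcMotor.class, "left_drive");
        rightDrive = opMode.hardwareMap.get(DcMotor.class, "right_drive");
        rightDrive.setDirection(DcMotor.Direction.REVERSE);
    }

    public void setDrivePower(double left, double right) {
        leftDrive.setPower(left);
        rightDrive.setPower(right);
    }
}
```

An Op Mode then constructs `new DriveBase(this)`, calls `init()` during its Init phase, and the same class can be shared by teleop and autonomous Op Modes.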

## Version 7.2 (20220723-130006)

### Breaking Changes

* Updates the build tooling. For Android Studio users, this change requires Android Studio Chipmunk 2021.2.1.
* Removes support for devices that are not competition legal, including Modern Robotics Core Control Modules, the Matrix Controller, and HiTechnic/NXT controllers and sensors. Support remains for Modern Robotics I2C sensors.

@ -17,6 +17,10 @@ apply from: '../build.dependencies.gradle'

android {
    namespace = 'org.firstinspires.ftc.teamcode'

    packagingOptions {
        jniLibs.useLegacyPackaging true
    }
}

dependencies {
@ -14,31 +14,41 @@ Sample opmodes exist in the FtcRobotController module.
To locate these samples, find the FtcRobotController module in the "Project/Android" tab.

Expand the following tree elements:
FtcRobotController / java / org.firstinspires.ftc.robotcontroller / external / samples
FtcRobotController/java/org.firstinspires.ftc.robotcontroller/external/samples

A range of different samples classes can be seen in this folder.
The class names follow a naming convention which indicates the purpose of each class.
The full description of this convention is found in the samples/sample_convention.md file.
### Naming of Samples

A brief synopsis of the naming convention is given here:
To gain a better understanding of how the samples are organized, and how to interpret the
naming system, it will help to understand the conventions that were used during their creation.

These conventions are described (in detail) in the sample_conventions.md file in this folder.

To summarize: A range of different samples classes will reside in the java/external/samples.
The class names will follow a naming convention which indicates the purpose of each class.
The prefix of the name will be one of the following:

* Basic: This is a minimally functional OpMode used to illustrate the skeleton/structure
Basic:  This is a minimally functional OpMode used to illustrate the skeleton/structure
        of a particular style of OpMode. These are bare bones examples.
* Sensor: This is a Sample OpMode that shows how to use a specific sensor.
  It is not intended as a functioning robot, it is simply showing the minimal code

Sensor: This is a Sample OpMode that shows how to use a specific sensor.
        It is not intended to drive a functioning robot, it is simply showing the minimal code
        required to read and display the sensor values.
* Hardware: This is not an actual OpMode, but a helper class that is used to describe
  one particular robot's hardware devices: eg: for a Pushbot. Look at any
  Pushbot sample to see how this can be used in an OpMode.
  Teams can copy one of these to create their own robot definition.
* Pushbot: This is a Sample OpMode that uses the Pushbot robot structure as a base.
* Concept: This is a sample OpMode that illustrates performing a specific function or concept.

Robot:  This is a Sample OpMode that assumes a simple two-motor (differential) drive base.
        It may be used to provide a common baseline driving OpMode, or
        to demonstrate how a particular sensor or concept can be used to navigate.

Concept: This is a sample OpMode that illustrates performing a specific function or concept.
        These may be complex, but their operation should be explained clearly in the comments,
        or the header should reference an external doc, guide or tutorial.
* Library: This is a class, or set of classes used to implement some strategy.
  These will typically NOT implement a full OpMode. Instead they will be included
  by an OpMode to provide some stand-alone capability.
        or the comments should reference an external doc, guide or tutorial.
        Each OpMode should try to only demonstrate a single concept so they are easy to
        locate based on their name. These OpModes may not produce a drivable robot.

After the prefix, other conventions will apply:

* Sensor class names are constructed as: Sensor - Company - Type
* Robot class names are constructed as: Robot - Mode - Action - OpModetype
* Concept class names are constructed as: Concept - Topic - OpModetype

Once you are familiar with the range of samples available, you can choose one to be the
basis for your own robot. In all cases, the desired sample(s) needs to be copied into
@ -50,7 +60,7 @@ This is done inside Android Studio directly, using the following steps:

2) Right click on the sample class and select "Copy"

3) Expand the TeamCode / java folder
3) Expand the TeamCode/java folder

4) Right click on the org.firstinspires.ftc.teamcode folder and select "Paste"

@ -54,7 +54,7 @@ android {
    defaultConfig {
        signingConfig signingConfigs.debug
        applicationId 'com.qualcomm.ftcrobotcontroller'
        minSdkVersion 23
        minSdkVersion 24
        //noinspection ExpiredTargetSdkVersion
        targetSdkVersion 28

@ -122,8 +122,5 @@ android {
    }

    repositories {
        flatDir {
            dirs rootProject.file('libs')
        }
    }

@ -1,22 +1,21 @@
repositories {
    mavenCentral()
    google() // Needed for androidx
    flatDir {
        dirs rootProject.file('libs')
    }
}

dependencies {
    implementation 'org.firstinspires.ftc:Inspection:7.2.0'
    implementation 'org.firstinspires.ftc:Blocks:7.2.0'
    implementation 'org.firstinspires.ftc:Tfod:7.2.0'
    implementation 'org.firstinspires.ftc:RobotCore:7.2.0'
    implementation 'org.firstinspires.ftc:RobotServer:7.2.0'
    implementation 'org.firstinspires.ftc:OnBotJava:7.2.0'
    implementation 'org.firstinspires.ftc:Hardware:7.2.0'
    implementation 'org.firstinspires.ftc:FtcCommon:7.2.0'
    implementation 'org.tensorflow:tensorflow-lite-task-vision:0.2.0'
    implementation 'org.firstinspires.ftc:Inspection:8.2.0'
    implementation 'org.firstinspires.ftc:Blocks:8.2.0'
    implementation 'org.firstinspires.ftc:Tfod:8.2.0'
    implementation 'org.firstinspires.ftc:RobotCore:8.2.0'
    implementation 'org.firstinspires.ftc:RobotServer:8.2.0'
    implementation 'org.firstinspires.ftc:OnBotJava:8.2.0'
    implementation 'org.firstinspires.ftc:Hardware:8.2.0'
    implementation 'org.firstinspires.ftc:FtcCommon:8.2.0'
    implementation 'org.tensorflow:tensorflow-lite-task-vision:0.4.3'
    runtimeOnly 'org.tensorflow:tensorflow-lite:2.12.0'
    implementation 'androidx.appcompat:appcompat:1.2.0'
    implementation 'org.firstinspires.ftc:gameAssets-FreightFrenzy:1.0.0'
    implementation 'org.firstinspires.ftc:gameAssets-PowerPlay:1.0.0'
    implementation 'org.firstinspires.ftc:Vision:8.2.0'
}

@ -10,6 +10,7 @@ buildscript {
        google()
    }
    dependencies {
        // Note for FTC Teams: Do not modify this yourself.
        classpath 'com.android.tools.build:gradle:7.2.0'
    }
}

@ -25,8 +26,4 @@ allprojects {

    repositories {
        mavenCentral()

        flatDir {
            dirs '../libs'
        }
    }

@ -5,3 +5,6 @@ android.useAndroidX=true

# Automatically convert third-party libraries to use AndroidX
android.enableJetifier=true

# Allow Gradle to use up to 1 GB of RAM
org.gradle.jvmargs=-Xmx1024M