Compare commits
12 Commits
20230830-1
...
63158215d0
Author | SHA1 | Date | |
---|---|---|---|
63158215d0 | |||
6e6e626816 | |||
b5aca8b93c | |||
8a0aa126b6 | |||
6938e09b2b | |||
e86c31771e | |||
0baf961680 | |||
1da45a36c6 | |||
c303962469 | |||
f326c0d033 | |||
c023e97a6a | |||
f3a5a54f67 |
@ -1,8 +1,8 @@
|
|||||||
<?xml version="1.0" encoding="utf-8"?>
|
<?xml version="1.0" encoding="utf-8"?>
|
||||||
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
|
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
|
||||||
xmlns:tools="http://schemas.android.com/tools"
|
xmlns:tools="http://schemas.android.com/tools"
|
||||||
android:versionCode="51"
|
android:versionCode="53"
|
||||||
android:versionName="9.0">
|
android:versionName="9.1">
|
||||||
|
|
||||||
<uses-permission android:name="android.permission.RECEIVE_BOOT_COMPLETED" />
|
<uses-permission android:name="android.permission.RECEIVE_BOOT_COMPLETED" />
|
||||||
|
|
||||||
|
@ -29,6 +29,7 @@
|
|||||||
|
|
||||||
package org.firstinspires.ftc.robotcontroller.external.samples;
|
package org.firstinspires.ftc.robotcontroller.external.samples;
|
||||||
|
|
||||||
|
import android.util.Size;
|
||||||
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
|
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
|
||||||
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
|
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
|
||||||
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
|
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
|
||||||
@ -44,6 +45,23 @@ import java.util.List;
|
|||||||
* This OpMode illustrates the basics of AprilTag recognition and pose estimation,
|
* This OpMode illustrates the basics of AprilTag recognition and pose estimation,
|
||||||
* including Java Builder structures for specifying Vision parameters.
|
* including Java Builder structures for specifying Vision parameters.
|
||||||
*
|
*
|
||||||
|
* For an introduction to AprilTags, see the FTC-DOCS link below:
|
||||||
|
* https://ftc-docs.firstinspires.org/en/latest/apriltag/vision_portal/apriltag_intro/apriltag-intro.html
|
||||||
|
*
|
||||||
|
* In this sample, any visible tag ID will be detected and displayed, but only tags that are included in the default
|
||||||
|
* "TagLibrary" will have their position and orientation information displayed. This default TagLibrary contains
|
||||||
|
* the current Season's AprilTags and a small set of "test Tags" in the high number range.
|
||||||
|
*
|
||||||
|
* When an AprilTag in the TagLibrary is detected, the SDK provides location and orientation of the tag, relative to the camera.
|
||||||
|
* This information is provided in the "ftcPose" member of the returned "detection", and is explained in the ftc-docs page linked below.
|
||||||
|
* https://ftc-docs.firstinspires.org/apriltag-detection-values
|
||||||
|
*
|
||||||
|
* To experiment with using AprilTags to navigate, try out these two driving samples:
|
||||||
|
* RobotAutoDriveToAprilTagOmni and RobotAutoDriveToAprilTagTank
|
||||||
|
*
|
||||||
|
* There are many "default" VisionPortal and AprilTag configuration parameters that may be overridden if desired.
|
||||||
|
* These default parameters are shown as comments in the code below.
|
||||||
|
*
|
||||||
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
|
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
|
||||||
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
|
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
|
||||||
*/
|
*/
|
||||||
@ -106,6 +124,8 @@ public class ConceptAprilTag extends LinearOpMode {
|
|||||||
|
|
||||||
// Create the AprilTag processor.
|
// Create the AprilTag processor.
|
||||||
aprilTag = new AprilTagProcessor.Builder()
|
aprilTag = new AprilTagProcessor.Builder()
|
||||||
|
|
||||||
|
// The following default settings are available to un-comment and edit as needed.
|
||||||
//.setDrawAxes(false)
|
//.setDrawAxes(false)
|
||||||
//.setDrawCubeProjection(false)
|
//.setDrawCubeProjection(false)
|
||||||
//.setDrawTagOutline(true)
|
//.setDrawTagOutline(true)
|
||||||
@ -117,11 +137,19 @@ public class ConceptAprilTag extends LinearOpMode {
|
|||||||
// If you do not manually specify calibration parameters, the SDK will attempt
|
// If you do not manually specify calibration parameters, the SDK will attempt
|
||||||
// to load a predefined calibration for your camera.
|
// to load a predefined calibration for your camera.
|
||||||
//.setLensIntrinsics(578.272, 578.272, 402.145, 221.506)
|
//.setLensIntrinsics(578.272, 578.272, 402.145, 221.506)
|
||||||
|
|
||||||
// ... these parameters are fx, fy, cx, cy.
|
// ... these parameters are fx, fy, cx, cy.
|
||||||
|
|
||||||
.build();
|
.build();
|
||||||
|
|
||||||
|
// Adjust Image Decimation to trade-off detection-range for detection-rate.
|
||||||
|
// eg: Some typical detection data using a Logitech C920 WebCam
|
||||||
|
// Decimation = 1 .. Detect 2" Tag from 10 feet away at 10 Frames per second
|
||||||
|
// Decimation = 2 .. Detect 2" Tag from 6 feet away at 22 Frames per second
|
||||||
|
// Decimation = 3 .. Detect 2" Tag from 4 feet away at 30 Frames Per Second (default)
|
||||||
|
// Decimation = 3 .. Detect 5" Tag from 10 feet away at 30 Frames Per Second (default)
|
||||||
|
// Note: Decimation can be changed on-the-fly to adapt during a match.
|
||||||
|
//aprilTag.setDecimation(3);
|
||||||
|
|
||||||
// Create the vision portal by using a builder.
|
// Create the vision portal by using a builder.
|
||||||
VisionPortal.Builder builder = new VisionPortal.Builder();
|
VisionPortal.Builder builder = new VisionPortal.Builder();
|
||||||
|
|
||||||
@ -136,7 +164,7 @@ public class ConceptAprilTag extends LinearOpMode {
|
|||||||
//builder.setCameraResolution(new Size(640, 480));
|
//builder.setCameraResolution(new Size(640, 480));
|
||||||
|
|
||||||
// Enable the RC preview (LiveView). Set "false" to omit camera monitoring.
|
// Enable the RC preview (LiveView). Set "false" to omit camera monitoring.
|
||||||
//builder.enableCameraMonitoring(true);
|
//builder.enableLiveView(true);
|
||||||
|
|
||||||
// Set the stream format; MJPEG uses less bandwidth than default YUY2.
|
// Set the stream format; MJPEG uses less bandwidth than default YUY2.
|
||||||
//builder.setStreamFormat(VisionPortal.StreamFormat.YUY2);
|
//builder.setStreamFormat(VisionPortal.StreamFormat.YUY2);
|
||||||
|
@ -44,6 +44,20 @@ import java.util.List;
|
|||||||
* This OpMode illustrates the basics of AprilTag recognition and pose estimation, using
|
* This OpMode illustrates the basics of AprilTag recognition and pose estimation, using
|
||||||
* the easy way.
|
* the easy way.
|
||||||
*
|
*
|
||||||
|
* For an introduction to AprilTags, see the FTC-DOCS link below:
|
||||||
|
* https://ftc-docs.firstinspires.org/en/latest/apriltag/vision_portal/apriltag_intro/apriltag-intro.html
|
||||||
|
*
|
||||||
|
* In this sample, any visible tag ID will be detected and displayed, but only tags that are included in the default
|
||||||
|
* "TagLibrary" will have their position and orientation information displayed. This default TagLibrary contains
|
||||||
|
* the current Season's AprilTags and a small set of "test Tags" in the high number range.
|
||||||
|
*
|
||||||
|
* When an AprilTag in the TagLibrary is detected, the SDK provides location and orientation of the tag, relative to the camera.
|
||||||
|
* This information is provided in the "ftcPose" member of the returned "detection", and is explained in the ftc-docs page linked below.
|
||||||
|
* https://ftc-docs.firstinspires.org/apriltag-detection-values
|
||||||
|
*
|
||||||
|
* To experiment with using AprilTags to navigate, try out these two driving samples:
|
||||||
|
* RobotAutoDriveToAprilTagOmni and RobotAutoDriveToAprilTagTank
|
||||||
|
*
|
||||||
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
|
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
|
||||||
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
|
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
|
||||||
*/
|
*/
|
||||||
|
@ -29,6 +29,7 @@
|
|||||||
|
|
||||||
package org.firstinspires.ftc.robotcontroller.external.samples;
|
package org.firstinspires.ftc.robotcontroller.external.samples;
|
||||||
|
|
||||||
|
import android.util.Size;
|
||||||
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
|
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
|
||||||
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
|
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
|
||||||
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
|
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
|
||||||
@ -53,6 +54,17 @@ public class ConceptTensorFlowObjectDetection extends LinearOpMode {
|
|||||||
|
|
||||||
private static final boolean USE_WEBCAM = true; // true for webcam, false for phone camera
|
private static final boolean USE_WEBCAM = true; // true for webcam, false for phone camera
|
||||||
|
|
||||||
|
// TFOD_MODEL_ASSET points to a model file stored in the project Asset location,
|
||||||
|
// this is only used for Android Studio when using models in Assets.
|
||||||
|
private static final String TFOD_MODEL_ASSET = "MyModelStoredAsAsset.tflite";
|
||||||
|
// TFOD_MODEL_FILE points to a model file stored onboard the Robot Controller's storage,
|
||||||
|
// this is used when uploading models directly to the RC using the model upload interface.
|
||||||
|
private static final String TFOD_MODEL_FILE = "/sdcard/FIRST/tflitemodels/myCustomModel.tflite";
|
||||||
|
// Define the labels recognized in the model for TFOD (must be in training order!)
|
||||||
|
private static final String[] LABELS = {
|
||||||
|
"Pixel",
|
||||||
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* The variable to store our instance of the TensorFlow Object Detection processor.
|
* The variable to store our instance of the TensorFlow Object Detection processor.
|
||||||
*/
|
*/
|
||||||
@ -107,11 +119,16 @@ public class ConceptTensorFlowObjectDetection extends LinearOpMode {
|
|||||||
// Create the TensorFlow processor by using a builder.
|
// Create the TensorFlow processor by using a builder.
|
||||||
tfod = new TfodProcessor.Builder()
|
tfod = new TfodProcessor.Builder()
|
||||||
|
|
||||||
// Use setModelAssetName() if the TF Model is built in as an asset.
|
// With the following lines commented out, the default TfodProcessor Builder
|
||||||
// Use setModelFileName() if you have downloaded a custom team model to the Robot Controller.
|
// will load the default model for the season. To define a custom model to load,
|
||||||
|
// choose one of the following:
|
||||||
|
// Use setModelAssetName() if the custom TF Model is built in as an asset (AS only).
|
||||||
|
// Use setModelFileName() if you have downloaded a custom team model to the Robot Controller.
|
||||||
//.setModelAssetName(TFOD_MODEL_ASSET)
|
//.setModelAssetName(TFOD_MODEL_ASSET)
|
||||||
//.setModelFileName(TFOD_MODEL_FILE)
|
//.setModelFileName(TFOD_MODEL_FILE)
|
||||||
|
|
||||||
|
// The following default settings are available to un-comment and edit as needed to
|
||||||
|
// set parameters for custom models.
|
||||||
//.setModelLabels(LABELS)
|
//.setModelLabels(LABELS)
|
||||||
//.setIsModelTensorFlow2(true)
|
//.setIsModelTensorFlow2(true)
|
||||||
//.setIsModelQuantized(true)
|
//.setIsModelQuantized(true)
|
||||||
@ -134,7 +151,7 @@ public class ConceptTensorFlowObjectDetection extends LinearOpMode {
|
|||||||
//builder.setCameraResolution(new Size(640, 480));
|
//builder.setCameraResolution(new Size(640, 480));
|
||||||
|
|
||||||
// Enable the RC preview (LiveView). Set "false" to omit camera monitoring.
|
// Enable the RC preview (LiveView). Set "false" to omit camera monitoring.
|
||||||
//builder.enableCameraMonitoring(true);
|
//builder.enableLiveView(true);
|
||||||
|
|
||||||
// Set the stream format; MJPEG uses less bandwidth than default YUY2.
|
// Set the stream format; MJPEG uses less bandwidth than default YUY2.
|
||||||
//builder.setStreamFormat(VisionPortal.StreamFormat.YUY2);
|
//builder.setStreamFormat(VisionPortal.StreamFormat.YUY2);
|
||||||
|
@ -49,6 +49,13 @@ import java.util.concurrent.TimeUnit;
|
|||||||
* This OpMode illustrates using a camera to locate and drive towards a specific AprilTag.
|
* This OpMode illustrates using a camera to locate and drive towards a specific AprilTag.
|
||||||
* The code assumes a Holonomic (Mecanum or X Drive) Robot.
|
* The code assumes a Holonomic (Mecanum or X Drive) Robot.
|
||||||
*
|
*
|
||||||
|
* For an introduction to AprilTags, see the ftc-docs link below:
|
||||||
|
* https://ftc-docs.firstinspires.org/en/latest/apriltag/vision_portal/apriltag_intro/apriltag-intro.html
|
||||||
|
*
|
||||||
|
* When an AprilTag in the TagLibrary is detected, the SDK provides location and orientation of the tag, relative to the camera.
|
||||||
|
* This information is provided in the "ftcPose" member of the returned "detection", and is explained in the ftc-docs page linked below.
|
||||||
|
* https://ftc-docs.firstinspires.org/apriltag-detection-values
|
||||||
|
*
|
||||||
* The drive goal is to rotate to keep the Tag centered in the camera, while strafing to be directly in front of the tag, and
|
* The drive goal is to rotate to keep the Tag centered in the camera, while strafing to be directly in front of the tag, and
|
||||||
* driving towards the tag to achieve the desired distance.
|
* driving towards the tag to achieve the desired distance.
|
||||||
* To reduce any motion blur (which will interrupt the detection process) the Camera exposure is reduced to a very low value (5mS)
|
* To reduce any motion blur (which will interrupt the detection process) the Camera exposure is reduced to a very low value (5mS)
|
||||||
@ -102,7 +109,7 @@ public class RobotAutoDriveToAprilTagOmni extends LinearOpMode
|
|||||||
private DcMotor rightBackDrive = null; // Used to control the right back drive wheel
|
private DcMotor rightBackDrive = null; // Used to control the right back drive wheel
|
||||||
|
|
||||||
private static final boolean USE_WEBCAM = true; // Set true to use a webcam, or false for a phone camera
|
private static final boolean USE_WEBCAM = true; // Set true to use a webcam, or false for a phone camera
|
||||||
private static final int DESIRED_TAG_ID = 0; // Choose the tag you want to approach or set to -1 for ANY tag.
|
private static final int DESIRED_TAG_ID = -1; // Choose the tag you want to approach or set to -1 for ANY tag.
|
||||||
private VisionPortal visionPortal; // Used to manage the video source.
|
private VisionPortal visionPortal; // Used to manage the video source.
|
||||||
private AprilTagProcessor aprilTag; // Used for managing the AprilTag detection process.
|
private AprilTagProcessor aprilTag; // Used for managing the AprilTag detection process.
|
||||||
private AprilTagDetection desiredTag = null; // Used to hold the data for a detected AprilTag
|
private AprilTagDetection desiredTag = null; // Used to hold the data for a detected AprilTag
|
||||||
@ -150,25 +157,33 @@ public class RobotAutoDriveToAprilTagOmni extends LinearOpMode
|
|||||||
// Step through the list of detected tags and look for a matching tag
|
// Step through the list of detected tags and look for a matching tag
|
||||||
List<AprilTagDetection> currentDetections = aprilTag.getDetections();
|
List<AprilTagDetection> currentDetections = aprilTag.getDetections();
|
||||||
for (AprilTagDetection detection : currentDetections) {
|
for (AprilTagDetection detection : currentDetections) {
|
||||||
if ((detection.metadata != null) &&
|
// Look to see if we have size info on this tag.
|
||||||
((DESIRED_TAG_ID < 0) || (detection.id == DESIRED_TAG_ID)) ){
|
if (detection.metadata != null) {
|
||||||
targetFound = true;
|
// Check to see if we want to track towards this tag.
|
||||||
desiredTag = detection;
|
if ((DESIRED_TAG_ID < 0) || (detection.id == DESIRED_TAG_ID)) {
|
||||||
break; // don't look any further.
|
// Yes, we want to use this tag.
|
||||||
|
targetFound = true;
|
||||||
|
desiredTag = detection;
|
||||||
|
break; // don't look any further.
|
||||||
|
} else {
|
||||||
|
// This tag is in the library, but we do not want to track it right now.
|
||||||
|
telemetry.addData("Skipping", "Tag ID %d is not desired", detection.id);
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
telemetry.addData("Unknown Target", "Tag ID %d is not in TagLibrary\n", detection.id);
|
// This tag is NOT in the library, so we don't have enough information to track to it.
|
||||||
|
telemetry.addData("Unknown", "Tag ID %d is not in TagLibrary", detection.id);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Tell the driver what we see, and what to do.
|
// Tell the driver what we see, and what to do.
|
||||||
if (targetFound) {
|
if (targetFound) {
|
||||||
telemetry.addData(">","HOLD Left-Bumper to Drive to Target\n");
|
telemetry.addData("\n>","HOLD Left-Bumper to Drive to Target\n");
|
||||||
telemetry.addData("Target", "ID %d (%s)", desiredTag.id, desiredTag.metadata.name);
|
telemetry.addData("Found", "ID %d (%s)", desiredTag.id, desiredTag.metadata.name);
|
||||||
telemetry.addData("Range", "%5.1f inches", desiredTag.ftcPose.range);
|
telemetry.addData("Range", "%5.1f inches", desiredTag.ftcPose.range);
|
||||||
telemetry.addData("Bearing","%3.0f degrees", desiredTag.ftcPose.bearing);
|
telemetry.addData("Bearing","%3.0f degrees", desiredTag.ftcPose.bearing);
|
||||||
telemetry.addData("Yaw","%3.0f degrees", desiredTag.ftcPose.yaw);
|
telemetry.addData("Yaw","%3.0f degrees", desiredTag.ftcPose.yaw);
|
||||||
} else {
|
} else {
|
||||||
telemetry.addData(">","Drive using joysticks to find valid target\n");
|
telemetry.addData("\n>","Drive using joysticks to find valid target\n");
|
||||||
}
|
}
|
||||||
|
|
||||||
// If Left Bumper is being pressed, AND we have found the desired target, Drive to target Automatically .
|
// If Left Bumper is being pressed, AND we have found the desired target, Drive to target Automatically .
|
||||||
@ -243,6 +258,15 @@ public class RobotAutoDriveToAprilTagOmni extends LinearOpMode
|
|||||||
// Create the AprilTag processor by using a builder.
|
// Create the AprilTag processor by using a builder.
|
||||||
aprilTag = new AprilTagProcessor.Builder().build();
|
aprilTag = new AprilTagProcessor.Builder().build();
|
||||||
|
|
||||||
|
// Adjust Image Decimation to trade-off detection-range for detection-rate.
|
||||||
|
// eg: Some typical detection data using a Logitech C920 WebCam
|
||||||
|
// Decimation = 1 .. Detect 2" Tag from 10 feet away at 10 Frames per second
|
||||||
|
// Decimation = 2 .. Detect 2" Tag from 6 feet away at 22 Frames per second
|
||||||
|
// Decimation = 3 .. Detect 2" Tag from 4 feet away at 30 Frames Per Second
|
||||||
|
// Decimation = 3 .. Detect 5" Tag from 10 feet away at 30 Frames Per Second
|
||||||
|
// Note: Decimation can be changed on-the-fly to adapt during a match.
|
||||||
|
aprilTag.setDecimation(2);
|
||||||
|
|
||||||
// Create the vision portal by using a builder.
|
// Create the vision portal by using a builder.
|
||||||
if (USE_WEBCAM) {
|
if (USE_WEBCAM) {
|
||||||
visionPortal = new VisionPortal.Builder()
|
visionPortal = new VisionPortal.Builder()
|
||||||
|
@ -49,6 +49,13 @@ import java.util.concurrent.TimeUnit;
|
|||||||
* This OpMode illustrates using a camera to locate and drive towards a specific AprilTag.
|
* This OpMode illustrates using a camera to locate and drive towards a specific AprilTag.
|
||||||
* The code assumes a basic two-wheel (Tank) Robot Drivetrain
|
* The code assumes a basic two-wheel (Tank) Robot Drivetrain
|
||||||
*
|
*
|
||||||
|
* For an introduction to AprilTags, see the ftc-docs link below:
|
||||||
|
* https://ftc-docs.firstinspires.org/en/latest/apriltag/vision_portal/apriltag_intro/apriltag-intro.html
|
||||||
|
*
|
||||||
|
* When an AprilTag in the TagLibrary is detected, the SDK provides location and orientation of the tag, relative to the camera.
|
||||||
|
* This information is provided in the "ftcPose" member of the returned "detection", and is explained in the ftc-docs page linked below.
|
||||||
|
* https://ftc-docs.firstinspires.org/apriltag-detection-values
|
||||||
|
*
|
||||||
* The driving goal is to rotate to keep the tag centered in the camera, while driving towards the tag to achieve the desired distance.
|
* The driving goal is to rotate to keep the tag centered in the camera, while driving towards the tag to achieve the desired distance.
|
||||||
* To reduce any motion blur (which will interrupt the detection process) the Camera exposure is reduced to a very low value (5mS)
|
* To reduce any motion blur (which will interrupt the detection process) the Camera exposure is reduced to a very low value (5mS)
|
||||||
* You can determine the best exposure and gain values by using the ConceptAprilTagOptimizeExposure OpMode in this Samples folder.
|
* You can determine the best exposure and gain values by using the ConceptAprilTagOptimizeExposure OpMode in this Samples folder.
|
||||||
@ -97,7 +104,7 @@ public class RobotAutoDriveToAprilTagTank extends LinearOpMode
|
|||||||
private DcMotor rightDrive = null; // Used to control the right drive wheel
|
private DcMotor rightDrive = null; // Used to control the right drive wheel
|
||||||
|
|
||||||
private static final boolean USE_WEBCAM = true; // Set true to use a webcam, or false for a phone camera
|
private static final boolean USE_WEBCAM = true; // Set true to use a webcam, or false for a phone camera
|
||||||
private static final int DESIRED_TAG_ID = 0; // Choose the tag you want to approach or set to -1 for ANY tag.
|
private static final int DESIRED_TAG_ID = -1; // Choose the tag you want to approach or set to -1 for ANY tag.
|
||||||
private VisionPortal visionPortal; // Used to manage the video source.
|
private VisionPortal visionPortal; // Used to manage the video source.
|
||||||
private AprilTagProcessor aprilTag; // Used for managing the AprilTag detection process.
|
private AprilTagProcessor aprilTag; // Used for managing the AprilTag detection process.
|
||||||
private AprilTagDetection desiredTag = null; // Used to hold the data for a detected AprilTag
|
private AprilTagDetection desiredTag = null; // Used to hold the data for a detected AprilTag
|
||||||
@ -140,24 +147,32 @@ public class RobotAutoDriveToAprilTagTank extends LinearOpMode
|
|||||||
// Step through the list of detected tags and look for a matching tag
|
// Step through the list of detected tags and look for a matching tag
|
||||||
List<AprilTagDetection> currentDetections = aprilTag.getDetections();
|
List<AprilTagDetection> currentDetections = aprilTag.getDetections();
|
||||||
for (AprilTagDetection detection : currentDetections) {
|
for (AprilTagDetection detection : currentDetections) {
|
||||||
if ((detection.metadata != null) &&
|
// Look to see if we have size info on this tag.
|
||||||
((DESIRED_TAG_ID < 0) || (detection.id == DESIRED_TAG_ID)) ){
|
if (detection.metadata != null) {
|
||||||
targetFound = true;
|
// Check to see if we want to track towards this tag.
|
||||||
desiredTag = detection;
|
if ((DESIRED_TAG_ID < 0) || (detection.id == DESIRED_TAG_ID)) {
|
||||||
break; // don't look any further.
|
// Yes, we want to use this tag.
|
||||||
|
targetFound = true;
|
||||||
|
desiredTag = detection;
|
||||||
|
break; // don't look any further.
|
||||||
|
} else {
|
||||||
|
// This tag is in the library, but we do not want to track it right now.
|
||||||
|
telemetry.addData("Skipping", "Tag ID %d is not desired", detection.id);
|
||||||
|
}
|
||||||
} else {
|
} else {
|
||||||
telemetry.addData("Unknown Target", "Tag ID %d is not in TagLibrary\n", detection.id);
|
// This tag is NOT in the library, so we don't have enough information to track to it.
|
||||||
|
telemetry.addData("Unknown", "Tag ID %d is not in TagLibrary", detection.id);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// Tell the driver what we see, and what to do.
|
// Tell the driver what we see, and what to do.
|
||||||
if (targetFound) {
|
if (targetFound) {
|
||||||
telemetry.addData(">","HOLD Left-Bumper to Drive to Target\n");
|
telemetry.addData("\n>","HOLD Left-Bumper to Drive to Target\n");
|
||||||
telemetry.addData("Target", "ID %d (%s)", desiredTag.id, desiredTag.metadata.name);
|
telemetry.addData("Found", "ID %d (%s)", desiredTag.id, desiredTag.metadata.name);
|
||||||
telemetry.addData("Range", "%5.1f inches", desiredTag.ftcPose.range);
|
telemetry.addData("Range", "%5.1f inches", desiredTag.ftcPose.range);
|
||||||
telemetry.addData("Bearing","%3.0f degrees", desiredTag.ftcPose.bearing);
|
telemetry.addData("Bearing","%3.0f degrees", desiredTag.ftcPose.bearing);
|
||||||
} else {
|
} else {
|
||||||
telemetry.addData(">","Drive using joysticks to find valid target\n");
|
telemetry.addData("\n>","Drive using joysticks to find valid target\n");
|
||||||
}
|
}
|
||||||
|
|
||||||
// If Left Bumper is being pressed, AND we have found the desired target, Drive to target Automatically .
|
// If Left Bumper is being pressed, AND we have found the desired target, Drive to target Automatically .
|
||||||
@ -218,6 +233,15 @@ public class RobotAutoDriveToAprilTagTank extends LinearOpMode
|
|||||||
// Create the AprilTag processor by using a builder.
|
// Create the AprilTag processor by using a builder.
|
||||||
aprilTag = new AprilTagProcessor.Builder().build();
|
aprilTag = new AprilTagProcessor.Builder().build();
|
||||||
|
|
||||||
|
// Adjust Image Decimation to trade-off detection-range for detection-rate.
|
||||||
|
// eg: Some typical detection data using a Logitech C920 WebCam
|
||||||
|
// Decimation = 1 .. Detect 2" Tag from 10 feet away at 10 Frames per second
|
||||||
|
// Decimation = 2 .. Detect 2" Tag from 6 feet away at 22 Frames per second
|
||||||
|
// Decimation = 3 .. Detect 2" Tag from 4 feet away at 30 Frames Per Second
|
||||||
|
// Decimation = 3 .. Detect 5" Tag from 10 feet away at 30 Frames Per Second
|
||||||
|
// Note: Decimation can be changed on-the-fly to adapt during a match.
|
||||||
|
aprilTag.setDecimation(2);
|
||||||
|
|
||||||
// Create the vision portal by using a builder.
|
// Create the vision portal by using a builder.
|
||||||
if (USE_WEBCAM) {
|
if (USE_WEBCAM) {
|
||||||
visionPortal = new VisionPortal.Builder()
|
visionPortal = new VisionPortal.Builder()
|
||||||
|
@ -0,0 +1,78 @@
|
|||||||
|
/* Copyright (c) 2024 FIRST. All rights reserved.
|
||||||
|
*
|
||||||
|
* Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
* are permitted (subject to the limitations in the disclaimer below) provided that
|
||||||
|
* the following conditions are met:
|
||||||
|
*
|
||||||
|
* Redistributions of source code must retain the above copyright notice, this list
|
||||||
|
* of conditions and the following disclaimer.
|
||||||
|
*
|
||||||
|
* Redistributions in binary form must reproduce the above copyright notice, this
|
||||||
|
* list of conditions and the following disclaimer in the documentation and/or
|
||||||
|
* other materials provided with the distribution.
|
||||||
|
*
|
||||||
|
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
|
||||||
|
* promote products derived from this software without specific prior written permission.
|
||||||
|
*
|
||||||
|
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
|
||||||
|
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||||
|
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
|
||||||
|
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||||
|
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
|
||||||
|
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||||
|
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||||
|
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||||
|
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||||
|
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||||
|
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package org.firstinspires.ftc.robotcontroller.external.samples;
|
||||||
|
|
||||||
|
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
|
||||||
|
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
|
||||||
|
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
|
||||||
|
import com.qualcomm.robotcore.hardware.DigitalChannel;
|
||||||
|
|
||||||
|
/*
|
||||||
|
* This OpMode demonstrates how to use a digital channel.
|
||||||
|
*
|
||||||
|
* The OpMode assumes that the digital channel is configured with a name of "digitalTouch".
|
||||||
|
*
|
||||||
|
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
|
||||||
|
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
|
||||||
|
*/
|
||||||
|
@TeleOp(name = "Sensor: digital channel", group = "Sensor")
|
||||||
|
@Disabled
|
||||||
|
public class SensorDigitalTouch extends LinearOpMode {
|
||||||
|
DigitalChannel digitalTouch; // Digital channel Object
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void runOpMode() {
|
||||||
|
|
||||||
|
// get a reference to our touchSensor object.
|
||||||
|
digitalTouch = hardwareMap.get(DigitalChannel.class, "digitalTouch");
|
||||||
|
|
||||||
|
digitalTouch.setMode(DigitalChannel.Mode.INPUT);
|
||||||
|
telemetry.addData("DigitalTouchSensorExample", "Press start to continue...");
|
||||||
|
telemetry.update();
|
||||||
|
|
||||||
|
// wait for the start button to be pressed.
|
||||||
|
waitForStart();
|
||||||
|
|
||||||
|
// while the OpMode is active, loop and read the digital channel.
|
||||||
|
// Note we use opModeIsActive() as our loop condition because it is an interruptible method.
|
||||||
|
while (opModeIsActive()) {
|
||||||
|
|
||||||
|
// button is pressed if value returned is LOW or false.
|
||||||
|
// send the info back to driver station using telemetry function.
|
||||||
|
if (digitalTouch.getState() == false) {
|
||||||
|
telemetry.addData("Button", "PRESSED");
|
||||||
|
} else {
|
||||||
|
telemetry.addData("Button", "NOT PRESSED");
|
||||||
|
}
|
||||||
|
|
||||||
|
telemetry.update();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
52
README.md
52
README.md
@ -59,6 +59,58 @@ The readme.md file located in the [/TeamCode/src/main/java/org/firstinspires/ftc
|
|||||||
|
|
||||||
# Release Information
|
# Release Information
|
||||||
|
|
||||||
|
## Version 9.1 (20240215-115542)
|
||||||
|
|
||||||
|
### Enhancements
|
||||||
|
* Fixes a problem with Blocks: if the user closes a Block's warning balloon, it will still be closed next time the project is opened in the Blocks editor.
|
||||||
|
* In the Blocks editor, an alert concerning missing hardware devices is not shown if all the Blocks that use the missing hardware devices are disabled.
|
||||||
|
* Adds Blocks to support comparing property values CRServo.Direction, DCMotor.Direction, DCMotor.Mode, DCMotor.ZeroPowerBehavior, DigitalChannel.Mode, GyroSensor.HeadingMode, IrSeekerSensor.Mode, and Servo.Direction, to the corresponding enum Block.
|
||||||
|
* Improves OnBotJava auto-import to correctly import classes when used in certain situations.
|
||||||
|
* Improves OnBotJava autocomplete to provide better completion options in most cases.
|
||||||
|
* This fixes an issue where autocomplete would fail if a method with two or more formal parameters was defined.
|
||||||
|
* In OnBotJava, code folding support was added to expand and collapse code sections
|
||||||
|
* In OnBotJava, the copyright header is now automatically collapsed when loading new files
|
||||||
|
* For all Blocks OpMode samples, intro comments have been moved to the RunOpMode comment balloon.
|
||||||
|
* The Clean up Blocks command in the Blocks editor now positions function Blocks so their comment balloons don't overlap other function Blocks.
|
||||||
|
* Added Blocks OpMode sample SensorTouch.
|
||||||
|
* Added Java OpMode sample SensorDigitalTouch.
|
||||||
|
* Several improvements to VisionPortal
|
||||||
|
* Adds option to control whether the stream is automatically started following a `.build()` call on a VisionPortal Builder
|
||||||
|
* Adds option to control whether the vision processing statistics overlay is rendered or not
|
||||||
|
* VisionPortals now implement the `CameraStreamSource` interface, allowing multiportal users to select which portal is routed to the DS in INIT by calling CameraStreamServer.getInstance().setSource(visionPortal). Can be selected via gamepad, between Camera Stream sessions.
|
||||||
|
* Add option to `AprilTagProcessor` to suppress calibration warnings
|
||||||
|
* Improves camera calibration warnings
|
||||||
|
* If a calibration is scaled, the resolution it was scaled from will be listed
|
||||||
|
* If calibrations exist with the wrong aspect ratio, the calibrated resolutions will be listed
|
||||||
|
* Fixes race condition which caused app crash when calling `stopStreaming()` immediately followed by `close()` on a VisionPortal
|
||||||
|
* Fixes IllegalStateException when calling `stopStreaming()` immediately after building a VisionPortal
|
||||||
|
* Added FTC Blocks counterparts to new Java methods:
|
||||||
|
* VisionPortal.Builder.setAutoStartStreamOnBuild
|
||||||
|
* VisionPortal.Builder.setShowStatsOverlay
|
||||||
|
* AprilTagProcessor.Builder.setSuppressCalibrationWarnings
|
||||||
|
* CameraStreamServer.setSource
|
||||||
|
|
||||||
|
### Bug Fixes
|
||||||
|
* Fixes a problem where OnBotJava does not apply font size settings to the editor.
|
||||||
|
* Updates EasyOpenCV dependency to v1.7.1
|
||||||
|
* Fixes inability to use EasyOpenCV CameraFactory in OnBotJava
|
||||||
|
* Fixes entire RC app crash when user pipeline throws an exception
|
||||||
|
* Fixes entire RC app crash when user canvas annotator throws an exception
|
||||||
|
* Use the modern stacktrace display when handling user exceptions instead of the legacy ESTOP telemetry message
|
||||||
|
|
||||||
|
## Version 9.0.1 (20230929-083754)
|
||||||
|
|
||||||
|
### Enhancements
|
||||||
|
* Updates AprilTag samples to include Decimation and additional Comments. Also corrects misleading tag ID warnings
|
||||||
|
* Increases maximum size of Blocks inline comments to 140 characters
|
||||||
|
* Adds Blocks sample BasicOmniOpMode.
|
||||||
|
* Updated CENTERSTAGE library AprilTag orientation quaternions
|
||||||
|
* Thanks [@FromenActual](https://github.com/FromenActual)
|
||||||
|
* Updated Java Sample ConceptTensorFlowObjectDetection.java to include missing elements needed for custom model support.
|
||||||
|
|
||||||
|
### Bug Fixes
|
||||||
|
* Fixes a problem where after October 1 the Driver Station will report as obsolete on v9.0 and prompt the user to update.
|
||||||
|
|
||||||
## Version 9.0 (20230830-154348)
|
## Version 9.0 (20230830-154348)
|
||||||
|
|
||||||
### Breaking Changes
|
### Breaking Changes
|
||||||
|
@ -0,0 +1,78 @@
|
|||||||
|
package org.firstinspires.ftc.teamcode;
|
||||||
|
|
||||||
|
import android.util.Size;
|
||||||
|
|
||||||
|
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
|
||||||
|
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
|
||||||
|
|
||||||
|
import org.firstinspires.ftc.robotcore.external.hardware.camera.BuiltinCameraDirection;
|
||||||
|
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
|
||||||
|
import org.firstinspires.ftc.vision.VisionPortal;
|
||||||
|
|
||||||
|
import java.util.Locale;
|
||||||
|
|
||||||
|
@TeleOp(name = "DevApp - Camera Test (HDMI)", group = "SCDS Development")
|
||||||
|
public class CometBotsCameraTest extends LinearOpMode {
|
||||||
|
|
||||||
|
final boolean USING_WEBCAM = true;
|
||||||
|
final BuiltinCameraDirection INTERNAL_CAM_DIR = BuiltinCameraDirection.FRONT;
|
||||||
|
final int RESOLUTION_WIDTH = 640;
|
||||||
|
final int RESOLUTION_HEIGHT = 480;
|
||||||
|
|
||||||
|
// Internal state
|
||||||
|
boolean lastX;
|
||||||
|
int frameCount;
|
||||||
|
long capReqTime;
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void runOpMode() throws InterruptedException {
|
||||||
|
|
||||||
|
telemetry.speak("Development - Camera Test");
|
||||||
|
|
||||||
|
VisionPortal portal;
|
||||||
|
|
||||||
|
if (USING_WEBCAM)
|
||||||
|
{
|
||||||
|
portal = new VisionPortal.Builder()
|
||||||
|
.setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"))
|
||||||
|
.setCameraResolution(new Size(RESOLUTION_WIDTH, RESOLUTION_HEIGHT))
|
||||||
|
.build();
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
portal = new VisionPortal.Builder()
|
||||||
|
.setCamera(INTERNAL_CAM_DIR)
|
||||||
|
.setCameraResolution(new Size(RESOLUTION_WIDTH, RESOLUTION_HEIGHT))
|
||||||
|
.build();
|
||||||
|
}
|
||||||
|
|
||||||
|
while (!isStopRequested())
|
||||||
|
{
|
||||||
|
boolean x = gamepad1.x;
|
||||||
|
|
||||||
|
if (x && !lastX)
|
||||||
|
{
|
||||||
|
portal.saveNextFrameRaw(String.format(Locale.US, "CameraFrameCapture-%06d", frameCount++));
|
||||||
|
capReqTime = System.currentTimeMillis();
|
||||||
|
}
|
||||||
|
|
||||||
|
lastX = x;
|
||||||
|
|
||||||
|
telemetry.addLine("######## Camera Capture Utility ########");
|
||||||
|
telemetry.addLine(String.format(Locale.US, " > Resolution: %dx%d", RESOLUTION_WIDTH, RESOLUTION_HEIGHT));
|
||||||
|
telemetry.addLine(" > Press X (or Square) to capture a frame");
|
||||||
|
telemetry.addData(" > Camera Status", portal.getCameraState());
|
||||||
|
|
||||||
|
if (capReqTime != 0)
|
||||||
|
{
|
||||||
|
telemetry.addLine("\nCaptured Frame!");
|
||||||
|
}
|
||||||
|
|
||||||
|
if (capReqTime != 0 && System.currentTimeMillis() - capReqTime > 1000)
|
||||||
|
{
|
||||||
|
capReqTime = 0;
|
||||||
|
}
|
||||||
|
telemetry.update();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
@ -0,0 +1,72 @@
|
|||||||
|
package org.firstinspires.ftc.teamcode;
|
||||||
|
|
||||||
|
import com.qualcomm.hardware.rev.RevHubOrientationOnRobot;
|
||||||
|
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
|
||||||
|
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
|
||||||
|
import com.qualcomm.robotcore.hardware.IMU;
|
||||||
|
|
||||||
|
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
|
||||||
|
import org.firstinspires.ftc.robotcore.external.navigation.AngularVelocity;
|
||||||
|
import org.firstinspires.ftc.robotcore.external.navigation.YawPitchRollAngles;
|
||||||
|
|
||||||
|
@TeleOp(name = "DevApp - IMU Test", group = "SCDS Development")
|
||||||
|
public class CometBotsIMUTest extends LinearOpMode {
|
||||||
|
|
||||||
|
IMU imu;
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void runOpMode() throws InterruptedException {
|
||||||
|
|
||||||
|
// Retrieve and initialize the IMU.
|
||||||
|
// This sample expects the IMU to be in a REV Hub and named "imu".
|
||||||
|
imu = hardwareMap.get(IMU.class, "imu");
|
||||||
|
|
||||||
|
/* Define how the hub is mounted on the robot to get the correct Yaw, Pitch and Roll values.
|
||||||
|
*
|
||||||
|
* Two input parameters are required to fully specify the Orientation.
|
||||||
|
* The first parameter specifies the direction the printed logo on the Hub is pointing.
|
||||||
|
* The second parameter specifies the direction the USB connector on the Hub is pointing.
|
||||||
|
* All directions are relative to the robot, and left/right is as-viewed from behind the robot.
|
||||||
|
*/
|
||||||
|
|
||||||
|
/* The next two lines define Hub orientation.
|
||||||
|
* The Default Orientation (shown) is when a hub is mounted horizontally with the printed logo pointing UP and the USB port pointing FORWARD.
|
||||||
|
*
|
||||||
|
* To Do: EDIT these two lines to match YOUR mounting configuration.
|
||||||
|
*/
|
||||||
|
RevHubOrientationOnRobot.LogoFacingDirection logoDirection = RevHubOrientationOnRobot.LogoFacingDirection.UP;
|
||||||
|
RevHubOrientationOnRobot.UsbFacingDirection usbDirection = RevHubOrientationOnRobot.UsbFacingDirection.FORWARD;
|
||||||
|
|
||||||
|
RevHubOrientationOnRobot orientationOnRobot = new RevHubOrientationOnRobot(logoDirection, usbDirection);
|
||||||
|
|
||||||
|
// Now initialize the IMU with this mounting orientation
|
||||||
|
// Note: if you choose two conflicting directions, this initialization will cause a code exception.
|
||||||
|
imu.initialize(new IMU.Parameters(orientationOnRobot));
|
||||||
|
|
||||||
|
// Loop and update the dashboard
|
||||||
|
while (!isStopRequested()) {
|
||||||
|
|
||||||
|
telemetry.addData("Hub orientation", "Logo=%s USB=%s\n ", logoDirection, usbDirection);
|
||||||
|
|
||||||
|
// Check to see if heading reset is requested
|
||||||
|
if (gamepad1.y) {
|
||||||
|
telemetry.addData("Yaw", "Resetting\n");
|
||||||
|
imu.resetYaw();
|
||||||
|
} else {
|
||||||
|
telemetry.addData("Yaw", "Press Y (triangle) on Gamepad to reset\n");
|
||||||
|
}
|
||||||
|
|
||||||
|
// Retrieve Rotational Angles and Velocities
|
||||||
|
YawPitchRollAngles orientation = imu.getRobotYawPitchRollAngles();
|
||||||
|
AngularVelocity angularVelocity = imu.getRobotAngularVelocity(AngleUnit.DEGREES);
|
||||||
|
|
||||||
|
telemetry.addData("Yaw (Z)", "%.2f Deg. (Heading)", orientation.getYaw(AngleUnit.DEGREES));
|
||||||
|
telemetry.addData("Pitch (X)", "%.2f Deg.", orientation.getPitch(AngleUnit.DEGREES));
|
||||||
|
telemetry.addData("Roll (Y)", "%.2f Deg.\n", orientation.getRoll(AngleUnit.DEGREES));
|
||||||
|
telemetry.addData("Yaw (Z) velocity", "%.2f Deg/Sec", angularVelocity.zRotationRate);
|
||||||
|
telemetry.addData("Pitch (X) velocity", "%.2f Deg/Sec", angularVelocity.xRotationRate);
|
||||||
|
telemetry.addData("Roll (Y) velocity", "%.2f Deg/Sec", angularVelocity.yRotationRate);
|
||||||
|
telemetry.update();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
@ -0,0 +1,44 @@
|
|||||||
|
package org.firstinspires.ftc.teamcode;
|
||||||
|
|
||||||
|
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
|
||||||
|
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
|
||||||
|
|
||||||
|
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
|
||||||
|
import org.openftc.easyopencv.OpenCvCamera;
|
||||||
|
import org.openftc.easyopencv.OpenCvCameraFactory;
|
||||||
|
import org.openftc.easyopencv.OpenCvCameraRotation;
|
||||||
|
|
||||||
|
@Autonomous(name="DevApp - OpenCV Test", group="SCDS Development")
|
||||||
|
public class CometBotsOpenCVTest extends LinearOpMode {
|
||||||
|
|
||||||
|
private OpenCvCamera controlHubCam; // Use OpenCvCamera class from FTC SDK
|
||||||
|
private static final int CAMERA_WIDTH = 1280; // width of wanted camera resolution
|
||||||
|
private static final int CAMERA_HEIGHT = 720; // height of wanted camera resolution
|
||||||
|
|
||||||
|
// Calculate the distance using the formula
|
||||||
|
public static final double objectWidthInRealWorldUnits = 3.75; // Replace with the actual width of the object in real-world units
|
||||||
|
public static final double focalLength = 728; // Replace with the focal length of the camera in pixels
|
||||||
|
|
||||||
|
private void initOpenCV() {
|
||||||
|
|
||||||
|
// Create an instance of the camera
|
||||||
|
int cameraMonitorViewId = hardwareMap.appContext.getResources().getIdentifier(
|
||||||
|
"cameraMonitorViewId", "id", hardwareMap.appContext.getPackageName());
|
||||||
|
|
||||||
|
// Use OpenCvCameraFactory class from FTC SDK to create camera instance
|
||||||
|
controlHubCam = OpenCvCameraFactory.getInstance().createWebcam(
|
||||||
|
hardwareMap.get(WebcamName.class, "Webcam 1"), cameraMonitorViewId);
|
||||||
|
|
||||||
|
controlHubCam.setPipeline(new YellowBlobDetectionPipeline());
|
||||||
|
|
||||||
|
controlHubCam.openCameraDevice();
|
||||||
|
controlHubCam.startStreaming(CAMERA_WIDTH, CAMERA_HEIGHT, OpenCvCameraRotation.UPRIGHT);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void runOpMode() throws InterruptedException {
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
@ -0,0 +1,88 @@
|
|||||||
|
package org.firstinspires.ftc.teamcode;
|
||||||
|
|
||||||
|
import static org.firstinspires.ftc.teamcode.cometbots.Constants.*;
|
||||||
|
|
||||||
|
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
|
||||||
|
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
|
||||||
|
import com.qualcomm.robotcore.hardware.ColorSensor;
|
||||||
|
import com.qualcomm.robotcore.hardware.DcMotor;
|
||||||
|
import com.qualcomm.robotcore.hardware.DistanceSensor;
|
||||||
|
import com.qualcomm.robotcore.hardware.TouchSensor;
|
||||||
|
|
||||||
|
import org.firstinspires.ftc.robotcore.external.navigation.DistanceUnit;
|
||||||
|
import org.firstinspires.ftc.teamcode.cometbots.MovementLibrary;
|
||||||
|
|
||||||
|
@Autonomous(name = "Development Class", group = "[14493] CometBots")
|
||||||
|
public class CometBotsProject extends LinearOpMode {
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void runOpMode() throws InterruptedException {
|
||||||
|
|
||||||
|
DcMotor rhw = hardwareMap.get(DcMotor.class, "right hand wheel");
|
||||||
|
DcMotor rlw = hardwareMap.get(DcMotor.class, "right leg wheel");
|
||||||
|
DcMotor lhw = hardwareMap.get(DcMotor.class, "left hand wheel");
|
||||||
|
DcMotor llw = hardwareMap.get(DcMotor.class, "left leg wheel");
|
||||||
|
TouchSensor touchSensor = hardwareMap.get(TouchSensor.class, "touchsensor");
|
||||||
|
DistanceSensor distanceSensor = hardwareMap.get(DistanceSensor.class, "distancesensor");
|
||||||
|
ColorSensor colorSensor = hardwareMap.get(ColorSensor.class, "colorsensor");
|
||||||
|
|
||||||
|
/*
|
||||||
|
Put all motors in an array for easier handling
|
||||||
|
|
||||||
|
NOTE: Placement of motor in array is important
|
||||||
|
especially when dealing with functions that
|
||||||
|
set direction
|
||||||
|
*/
|
||||||
|
DcMotor[] motors = {rhw, rlw, lhw, llw};
|
||||||
|
|
||||||
|
/*
|
||||||
|
Initialize all wheels forward using motors array
|
||||||
|
*/
|
||||||
|
MovementLibrary.setDirectionForward(motors);
|
||||||
|
|
||||||
|
/*
|
||||||
|
Initialize wheel direction with base speed
|
||||||
|
*/
|
||||||
|
MovementLibrary.setSpeed(motors, CB_BASE_SPEED);
|
||||||
|
|
||||||
|
/*
|
||||||
|
Wait for the driver to hit START in the driver hub
|
||||||
|
*/
|
||||||
|
waitForStart();
|
||||||
|
|
||||||
|
while (opModeIsActive()) {
|
||||||
|
|
||||||
|
if (touchSensor.isPressed()) {
|
||||||
|
telemetry.speak("Touch sensor is pressed");
|
||||||
|
telemetry.addData("Touch sensor pressed?", touchSensor.isPressed());
|
||||||
|
}
|
||||||
|
|
||||||
|
/*
|
||||||
|
Distance traveled in inches
|
||||||
|
*/
|
||||||
|
double distanceInInches = distanceSensor.getDistance(DistanceUnit.INCH);
|
||||||
|
telemetry.addData("Distance traveled (inches)", "%.2f", distanceInInches);
|
||||||
|
|
||||||
|
/*
|
||||||
|
Capture color readout from color sensor
|
||||||
|
*/
|
||||||
|
telemetry.addData("Color Sensor [Red]", colorSensor.red());
|
||||||
|
telemetry.addData("Color Sensor [Green]", colorSensor.green());
|
||||||
|
telemetry.addData("Color Sensor [Blue]", colorSensor.blue());
|
||||||
|
telemetry.addData("Color Sensor [Alpha]", colorSensor.alpha());
|
||||||
|
|
||||||
|
/*
|
||||||
|
Speed up when going over an object that is blue-ish / green
|
||||||
|
*/
|
||||||
|
if (colorSensor.blue() > 800 && colorSensor.green() > 1100) {
|
||||||
|
telemetry.speak("Green speed boost");
|
||||||
|
MovementLibrary.setSpeed(motors, CB_BASE_SPEED * 2.0);
|
||||||
|
} else {
|
||||||
|
MovementLibrary.setSpeed(motors, CB_BASE_SPEED);
|
||||||
|
}
|
||||||
|
|
||||||
|
telemetry.update();
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
@ -0,0 +1,142 @@
|
|||||||
|
/* Copyright (c) 2019 FIRST. All rights reserved.
|
||||||
|
*
|
||||||
|
* Redistribution and use in source and binary forms, with or without modification,
|
||||||
|
* are permitted (subject to the limitations in the disclaimer below) provided that
|
||||||
|
* the following conditions are met:
|
||||||
|
*
|
||||||
|
* Redistributions of source code must retain the above copyright notice, this list
|
||||||
|
* of conditions and the following disclaimer.
|
||||||
|
*
|
||||||
|
* Redistributions in binary form must reproduce the above copyright notice, this
|
||||||
|
* list of conditions and the following disclaimer in the documentation and/or
|
||||||
|
* other materials provided with the distribution.
|
||||||
|
*
|
||||||
|
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
|
||||||
|
* promote products derived from this software without specific prior written permission.
|
||||||
|
*
|
||||||
|
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
|
||||||
|
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||||
|
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
|
||||||
|
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
||||||
|
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
|
||||||
|
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
||||||
|
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
||||||
|
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
||||||
|
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
||||||
|
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||||
|
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
|
*/
|
||||||
|
|
||||||
|
package org.firstinspires.ftc.teamcode;
|
||||||
|
|
||||||
|
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
|
||||||
|
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
|
||||||
|
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
|
||||||
|
|
||||||
|
import org.firstinspires.ftc.robotcore.external.hardware.camera.BuiltinCameraDirection;
|
||||||
|
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
|
||||||
|
import org.firstinspires.ftc.robotcore.external.tfod.Recognition;
|
||||||
|
import org.firstinspires.ftc.vision.VisionPortal;
|
||||||
|
import org.firstinspires.ftc.vision.tfod.TfodProcessor;
|
||||||
|
|
||||||
|
import java.util.List;
|
||||||
|
|
||||||
|
/*
|
||||||
|
* This OpMode illustrates the basics of TensorFlow Object Detection, using
|
||||||
|
* the easiest way.
|
||||||
|
*
|
||||||
|
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
|
||||||
|
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
|
||||||
|
*/
|
||||||
|
@TeleOp(name = "Concept: TensorFlow Object Detection Easy", group = "Concept")
|
||||||
|
public class ConceptTensorFlowObjectDetectionEasy extends LinearOpMode {
|
||||||
|
|
||||||
|
private static final boolean USE_WEBCAM = true; // true for webcam, false for phone camera
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The variable to store our instance of the TensorFlow Object Detection processor.
|
||||||
|
*/
|
||||||
|
private TfodProcessor tfod;
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The variable to store our instance of the vision portal.
|
||||||
|
*/
|
||||||
|
private VisionPortal visionPortal;
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void runOpMode() {
|
||||||
|
|
||||||
|
initTfod();
|
||||||
|
|
||||||
|
// Wait for the DS start button to be touched.
|
||||||
|
telemetry.addData("DS preview on/off", "3 dots, Camera Stream");
|
||||||
|
telemetry.addData(">", "Touch Play to start OpMode");
|
||||||
|
telemetry.update();
|
||||||
|
waitForStart();
|
||||||
|
|
||||||
|
if (opModeIsActive()) {
|
||||||
|
while (opModeIsActive()) {
|
||||||
|
|
||||||
|
telemetryTfod();
|
||||||
|
|
||||||
|
// Push telemetry to the Driver Station.
|
||||||
|
telemetry.update();
|
||||||
|
|
||||||
|
// Save CPU resources; can resume streaming when needed.
|
||||||
|
if (gamepad1.dpad_down) {
|
||||||
|
visionPortal.stopStreaming();
|
||||||
|
} else if (gamepad1.dpad_up) {
|
||||||
|
visionPortal.resumeStreaming();
|
||||||
|
}
|
||||||
|
|
||||||
|
// Share the CPU.
|
||||||
|
sleep(20);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Save more CPU resources when camera is no longer needed.
|
||||||
|
visionPortal.close();
|
||||||
|
|
||||||
|
} // end runOpMode()
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Initialize the TensorFlow Object Detection processor.
|
||||||
|
*/
|
||||||
|
private void initTfod() {
|
||||||
|
|
||||||
|
// Create the TensorFlow processor the easy way.
|
||||||
|
tfod = TfodProcessor.easyCreateWithDefaults();
|
||||||
|
|
||||||
|
// Create the vision portal the easy way.
|
||||||
|
if (USE_WEBCAM) {
|
||||||
|
visionPortal = VisionPortal.easyCreateWithDefaults(
|
||||||
|
hardwareMap.get(WebcamName.class, "Webcam 1"), tfod);
|
||||||
|
} else {
|
||||||
|
visionPortal = VisionPortal.easyCreateWithDefaults(
|
||||||
|
BuiltinCameraDirection.BACK, tfod);
|
||||||
|
}
|
||||||
|
|
||||||
|
} // end method initTfod()
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Add telemetry about TensorFlow Object Detection (TFOD) recognitions.
|
||||||
|
*/
|
||||||
|
private void telemetryTfod() {
|
||||||
|
|
||||||
|
List<Recognition> currentRecognitions = tfod.getRecognitions();
|
||||||
|
telemetry.addData("# Objects Detected", currentRecognitions.size());
|
||||||
|
|
||||||
|
// Step through the list of recognitions and display info for each one.
|
||||||
|
for (Recognition recognition : currentRecognitions) {
|
||||||
|
double x = (recognition.getLeft() + recognition.getRight()) / 2 ;
|
||||||
|
double y = (recognition.getTop() + recognition.getBottom()) / 2 ;
|
||||||
|
|
||||||
|
telemetry.addData(""," ");
|
||||||
|
telemetry.addData("Image", "%s (%.0f %% Conf.)", recognition.getLabel(), recognition.getConfidence() * 100);
|
||||||
|
telemetry.addData("- Position", "%.0f / %.0f", x, y);
|
||||||
|
telemetry.addData("- Size", "%.0f x %.0f", recognition.getWidth(), recognition.getHeight());
|
||||||
|
} // end for() loop
|
||||||
|
|
||||||
|
} // end method telemetryTfod()
|
||||||
|
|
||||||
|
} // end class
|
@ -0,0 +1,7 @@
|
|||||||
|
package org.firstinspires.ftc.teamcode.cometbots;
|
||||||
|
|
||||||
|
/** Shared tuning constants for the CometBots OpModes. */
public final class Constants {

    // Base drive-motor power applied by MovementLibrary.setSpeed callers.
    // FIX: declared final — it is named and used as a constant but was mutable.
    public static final double CB_BASE_SPEED = 0.2;

    private Constants() {
        // Constants holder; not instantiable.
    }
}
|
@ -0,0 +1,40 @@
|
|||||||
|
package org.firstinspires.ftc.teamcode.cometbots;
|
||||||
|
|
||||||
|
import com.qualcomm.robotcore.hardware.DcMotor;
|
||||||
|
|
||||||
|
public class MovementLibrary {
|
||||||
|
|
||||||
|
public static void setDirectionForward(DcMotor[] motors) {
|
||||||
|
motors[0].setDirection(DcMotor.Direction.REVERSE);
|
||||||
|
motors[1].setDirection(DcMotor.Direction.FORWARD);
|
||||||
|
motors[2].setDirection(DcMotor.Direction.FORWARD);
|
||||||
|
motors[3].setDirection(DcMotor.Direction.REVERSE);
|
||||||
|
}
|
||||||
|
|
||||||
|
public static void setDirectionBackward(DcMotor[] motors) {
|
||||||
|
motors[0].setDirection(DcMotor.Direction.FORWARD);
|
||||||
|
motors[1].setDirection(DcMotor.Direction.REVERSE);
|
||||||
|
motors[2].setDirection(DcMotor.Direction.REVERSE);
|
||||||
|
motors[3].setDirection(DcMotor.Direction.FORWARD);
|
||||||
|
}
|
||||||
|
|
||||||
|
public static void setSpeed(DcMotor[] motors, double speedValue) {
|
||||||
|
for (DcMotor motor : motors) {
|
||||||
|
motor.setPower(speedValue);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
public static void setDirectionRight(DcMotor[] motors) {
|
||||||
|
motors[0].setDirection(DcMotor.Direction.REVERSE);
|
||||||
|
motors[1].setDirection(DcMotor.Direction.REVERSE);
|
||||||
|
motors[2].setDirection(DcMotor.Direction.FORWARD);
|
||||||
|
motors[3].setDirection(DcMotor.Direction.FORWARD);
|
||||||
|
}
|
||||||
|
|
||||||
|
public static void setDirectionLeft(DcMotor[] motors) {
|
||||||
|
motors[0].setDirection(DcMotor.Direction.FORWARD);
|
||||||
|
motors[1].setDirection(DcMotor.Direction.FORWARD);
|
||||||
|
motors[2].setDirection(DcMotor.Direction.REVERSE);
|
||||||
|
motors[3].setDirection(DcMotor.Direction.REVERSE);
|
||||||
|
}
|
||||||
|
}
|
@ -0,0 +1,4 @@
|
|||||||
|
https://www.youtube.com/watch?v=aeMWWvteF2U
|
||||||
|
|
||||||
|
https://www.youtube.com/watch?v=1xdo8QP6_mI
|
||||||
|
|
@ -1,21 +1,23 @@
|
|||||||
repositories {
|
repositories {
|
||||||
mavenCentral()
|
mavenCentral()
|
||||||
google() // Needed for androidx
|
google() // Needed for androidx
|
||||||
|
maven { url = 'https://maven.brott.dev/' }
|
||||||
}
|
}
|
||||||
|
|
||||||
dependencies {
|
dependencies {
|
||||||
implementation 'org.firstinspires.ftc:Inspection:9.0.0'
|
implementation 'org.firstinspires.ftc:Inspection:9.1.0'
|
||||||
implementation 'org.firstinspires.ftc:Blocks:9.0.0'
|
implementation 'org.firstinspires.ftc:Blocks:9.1.0'
|
||||||
implementation 'org.firstinspires.ftc:Tfod:9.0.0'
|
implementation 'org.firstinspires.ftc:Tfod:9.1.0'
|
||||||
implementation 'org.firstinspires.ftc:RobotCore:9.0.0'
|
implementation 'org.firstinspires.ftc:RobotCore:9.1.0'
|
||||||
implementation 'org.firstinspires.ftc:RobotServer:9.0.0'
|
implementation 'org.firstinspires.ftc:RobotServer:9.1.0'
|
||||||
implementation 'org.firstinspires.ftc:OnBotJava:9.0.0'
|
implementation 'org.firstinspires.ftc:OnBotJava:9.1.0'
|
||||||
implementation 'org.firstinspires.ftc:Hardware:9.0.0'
|
implementation 'org.firstinspires.ftc:Hardware:9.1.0'
|
||||||
implementation 'org.firstinspires.ftc:FtcCommon:9.0.0'
|
implementation 'org.firstinspires.ftc:FtcCommon:9.1.0'
|
||||||
implementation 'org.firstinspires.ftc:Vision:9.0.0'
|
implementation 'org.firstinspires.ftc:Vision:9.1.0'
|
||||||
implementation 'org.firstinspires.ftc:gameAssets-CenterStage:1.0.0'
|
implementation 'org.firstinspires.ftc:gameAssets-CenterStage:1.0.0'
|
||||||
implementation 'org.tensorflow:tensorflow-lite-task-vision:0.4.3'
|
implementation 'org.tensorflow:tensorflow-lite-task-vision:0.4.3'
|
||||||
runtimeOnly 'org.tensorflow:tensorflow-lite:2.12.0'
|
runtimeOnly 'org.tensorflow:tensorflow-lite:2.12.0'
|
||||||
implementation 'androidx.appcompat:appcompat:1.2.0'
|
implementation 'androidx.appcompat:appcompat:1.2.0'
|
||||||
|
implementation 'com.acmerobotics.dashboard:dashboard:0.4.15' // FTC Dashboard
|
||||||
}
|
}
|
||||||
|
|
||||||
|
Reference in New Issue
Block a user