Mirror of https://github.com/trc492/FtcTemplate.git, synced 2025-07-01 21:11:23 -07:00
Updated to FTC SDK 10.1.
@@ -1,8 +1,8 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:tools="http://schemas.android.com/tools"
    android:versionCode="55"
    android:versionName="10.0">
    android:versionCode="56"
    android:versionName="10.1">

    <uses-permission android:name="android.permission.RECEIVE_BOOT_COMPLETED" />

@@ -58,6 +58,9 @@ import java.util.List;
 * When an AprilTag in the TagLibrary is detected, the SDK provides location and orientation of the robot, relative to the field origin.
 * This information is provided in the "robotPose" member of the returned "detection".
 *
 * To learn about the Field Coordinate System that is defined for FTC (and used by this OpMode), see the FTC-DOCS link below:
 * https://ftc-docs.firstinspires.org/en/latest/game_specific_resources/field_coordinate_system/field-coordinate-system.html
 *
 * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
 * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
 */
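
The hunk above documents the "robotPose" member added with SDK 10.1. As a rough sketch only (not part of this commit, and assuming the SDK 10.x AprilTag API: AprilTagProcessor, AprilTagDetection, Pose3D, and AngleUnit from the FTC vision and navigation packages), reading that pose inside an OpMode loop might look like:

    // Rough sketch, assuming org.firstinspires.ftc.vision.apriltag.AprilTagProcessor / AprilTagDetection
    // and org.firstinspires.ftc.robotcore.external.navigation.AngleUnit.
    AprilTagProcessor aprilTag = new AprilTagProcessor.Builder().build();
    // ... add "aprilTag" to a VisionPortal, then inside the OpMode loop:
    for (AprilTagDetection detection : aprilTag.getDetections())
    {
        if (detection.metadata != null)        // tag is in the current TagLibrary
        {
            double x   = detection.robotPose.getPosition().x;    // field-frame X
            double y   = detection.robotPose.getPosition().y;    // field-frame Y
            double yaw = detection.robotPose.getOrientation().getYaw(AngleUnit.DEGREES);
            telemetry.addData("Robot Pose", "x=%.1f  y=%.1f  yaw=%.1f", x, y, yaw);
        }
    }
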
@@ -0,0 +1,191 @@
/*
 * Copyright (c) 2024 Phil Malone
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

package org.firstinspires.ftc.robotcontroller.external.samples;

import android.util.Size;

import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.util.SortOrder;

import org.firstinspires.ftc.robotcore.external.Telemetry;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.vision.VisionPortal;
import org.firstinspires.ftc.vision.opencv.ColorBlobLocatorProcessor;
import org.firstinspires.ftc.vision.opencv.ColorRange;
import org.firstinspires.ftc.vision.opencv.ImageRegion;
import org.opencv.core.RotatedRect;

import java.util.List;

/*
 * This OpMode illustrates how to use a video source (camera) to locate specifically colored regions.
 *
 * Unlike a "color sensor", which determines the color of an object in the field of view, this "color locator"
 * will search the Region Of Interest (ROI) in a camera image, and find any "blobs" of color that match the requested color range.
 * These blobs can be further filtered and sorted to find the one most likely to be the item the user is looking for.
 *
 * To perform this function, a VisionPortal runs a ColorBlobLocatorProcessor process.
 * The ColorBlobLocatorProcessor process is created first, and then the VisionPortal is built to use this process.
 * The ColorBlobLocatorProcessor analyses the ROI and locates pixels that match the ColorRange to form a "mask".
 * The matching pixels are then collected into contiguous "blobs" of pixels. The outer boundary of each blob is called its "contour".
 * For each blob, the process then creates the smallest possible rectangle, "boxFit", that will fully encase the contour.
 * The user can then call getBlobs() to retrieve the list of Blobs, where each Blob contains the contour and the boxFit data.
 * Note: The default sort order for Blobs is ContourArea, in descending order, so the biggest contours are listed first.
 *
 * To aid the user, a colored boxFit rectangle is drawn on the camera preview to show the location of each Blob.
 * The original Blob contour can also be added to the preview. This is helpful when configuring the ColorBlobLocatorProcessor parameters.
 *
 * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
 * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
 */

@Disabled
@TeleOp(name = "Concept: Vision Color-Locator", group = "Concept")
public class ConceptVisionColorLocator extends LinearOpMode
{
    @Override
    public void runOpMode()
    {
        /* Build a "Color Locator" vision processor based on the ColorBlobLocatorProcessor class.
         * - Specify the color range you are looking for. You can use a predefined color, or create your own color range.
         *       .setTargetColorRange(ColorRange.BLUE)                  // use a predefined color match
         *         Available predefined colors are: RED, BLUE, YELLOW, GREEN
         *       .setTargetColorRange(new ColorRange(ColorSpace.YCrCb,  // or define your own color match
         *                                           new Scalar( 32, 176,   0),
         *                                           new Scalar(255, 255, 132)))
         *
         * - Focus the color locator by defining a RegionOfInterest (ROI) which you want to search.
         *     This can be the entire frame, or a sub-region defined using:
         *     1) standard image coordinates or 2) a normalized +/- 1.0 coordinate system.
         *     Use one form of the ImageRegion class to define the ROI.
         *         ImageRegion.entireFrame()
         *         ImageRegion.asImageCoordinates(50, 50, 150, 150)             100x100 pixel square near the upper left corner
         *         ImageRegion.asUnityCenterCoordinates(-0.5, 0.5, 0.5, -0.5)   50% width/height square centered on screen
         *
         * - Define which contours are included.
         *     You can get ALL the contours, or you can skip any contours that are completely inside another contour.
         *        .setContourMode(ColorBlobLocatorProcessor.ContourMode.ALL_FLATTENED_HIERARCHY)  // return all contours
         *        .setContourMode(ColorBlobLocatorProcessor.ContourMode.EXTERNAL_ONLY)            // exclude contours inside other contours
         *        Note: EXTERNAL_ONLY helps prevent bright reflection spots from breaking up areas of solid color.
         *
         * - Turn the display of contours ON or OFF. Turning this on helps debugging but takes up valuable CPU time.
         *        .setDrawContours(true)
         *
         * - Include any pre-processing of the image or mask before looking for Blobs.
         *     There are some extra processing steps you can include to improve the formation of blobs. Using these features requires
         *     an understanding of how they may affect the final blobs. The "pixels" argument sets the NxN kernel size.
         *        .setBlurSize(int pixels)    Blurring an image helps to provide a smooth color transition between objects, and smoother contours.
         *                                    The higher the number of pixels, the more blurred the image becomes.
         *                                    Note: Even "pixels" values will be incremented to satisfy the "odd number" requirement.
         *                                    Blurring too much may hide smaller features. A "pixels" size of 5 is good for a 320x240 image.
         *        .setErodeSize(int pixels)   Erosion removes floating pixels and thin lines so that only substantive objects remain.
         *                                    Erosion can grow holes inside regions, and also shrink objects.
         *                                    "pixels" in the range of 2-4 are suitable for low res images.
         *        .setDilateSize(int pixels)  Dilation makes objects more visible by filling in small holes, making lines appear thicker,
         *                                    and making filled shapes appear larger. Dilation is useful for joining broken parts of an
         *                                    object, such as when removing noise from an image.
         *                                    "pixels" in the range of 2-4 are suitable for low res images.
         */
        ColorBlobLocatorProcessor colorLocator = new ColorBlobLocatorProcessor.Builder()
                .setTargetColorRange(ColorRange.BLUE)         // use a predefined color match
                .setContourMode(ColorBlobLocatorProcessor.ContourMode.EXTERNAL_ONLY)  // exclude blobs inside blobs
                .setRoi(ImageRegion.asUnityCenterCoordinates(-0.5, 0.5, 0.5, -0.5))   // search central 1/4 of camera view
                .setDrawContours(true)                        // Show contours on the Stream Preview
                .setBlurSize(5)                               // Smooth the transitions between different colors in image
                .build();

        /*
         * Build a vision portal to run the Color Locator process.
         *
         *  - Add the colorLocator process created above.
         *  - Set the desired video resolution.
         *      Since a high resolution will not improve this process, choose a lower resolution that is
         *      supported by your camera. This will improve overall performance and reduce latency.
         *  - Choose your video source. This may be
         *      .setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"))  .....   for a webcam
         *  or
         *      .setCamera(BuiltinCameraDirection.BACK)    ... for a Phone Camera
         */
        VisionPortal portal = new VisionPortal.Builder()
                .addProcessor(colorLocator)
                .setCameraResolution(new Size(320, 240))
                .setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"))
                .build();

        telemetry.setMsTransmissionInterval(50);   // Speed up telemetry updates; just use for debugging.
        telemetry.setDisplayFormat(Telemetry.DisplayFormat.MONOSPACE);

        // WARNING: To be able to view the stream preview on the Driver Station, this code runs in INIT mode.
        while (opModeIsActive() || opModeInInit())
        {
            telemetry.addData("preview on/off", "... Camera Stream\n");

            // Read the current list of blobs.
            List<ColorBlobLocatorProcessor.Blob> blobs = colorLocator.getBlobs();

            /*
             * The list of Blobs can be filtered to remove unwanted Blobs.
             *   Note: All contours will still be displayed on the Stream Preview, but only those that satisfy the filter
             *         conditions will remain in the current list of "blobs". Multiple filters may be used.
             *
             * Use any of the following filters.
             *
             * ColorBlobLocatorProcessor.Util.filterByArea(minArea, maxArea, blobs);
             *   A Blob's area is the number of pixels contained within the Contour. Filter out any that are too big or small.
             *   Start with a large range and then refine the range based on the likely size of the desired object in the viewfinder.
             *
             * ColorBlobLocatorProcessor.Util.filterByDensity(minDensity, maxDensity, blobs);
             *   A blob's density is an indication of how "full" the contour is.
             *   If you put a rubber band around the contour you would get the "Convex Hull" of the contour.
             *   The density is the ratio of Contour-area to Convex Hull-area.
             *
             * ColorBlobLocatorProcessor.Util.filterByAspectRatio(minAspect, maxAspect, blobs);
             *   A blob's aspect ratio is the ratio of boxFit long side to short side.
             *   A perfect square has an aspect ratio of 1. All others are > 1.
             */
            ColorBlobLocatorProcessor.Util.filterByArea(50, 20000, blobs);  // filter out very small blobs.

            /*
             * The list of Blobs can be sorted using the same Blob attributes as listed above.
             * No more than one sort call should be made. Sorting can use ascending or descending order.
             *     ColorBlobLocatorProcessor.Util.sortByArea(SortOrder.DESCENDING, blobs);        // Default
             *     ColorBlobLocatorProcessor.Util.sortByDensity(SortOrder.DESCENDING, blobs);
             *     ColorBlobLocatorProcessor.Util.sortByAspectRatio(SortOrder.DESCENDING, blobs);
             */

            telemetry.addLine(" Area Density Aspect  Center");

            // Display the size (area) and center location for each Blob.
            for (ColorBlobLocatorProcessor.Blob b : blobs)
            {
                RotatedRect boxFit = b.getBoxFit();
                telemetry.addLine(String.format("%5d  %4.2f   %5.2f  (%3d,%3d)",
                        b.getContourArea(), b.getDensity(), b.getAspectRatio(), (int) boxFit.center.x, (int) boxFit.center.y));
            }

            telemetry.update();
            sleep(50);
        }
    }
}
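
The filter and sort helpers listed in the comments of this sample can be combined; a minimal sketch (the threshold values are illustrative placeholders, not from this commit) that keeps only solid, reasonably sized blobs and then reads the largest one could look like:

    // Illustrative values only; uses the Util methods named in this sample's comments.
    ColorBlobLocatorProcessor.Util.filterByArea(50, 20000, blobs);           // drop very small and very large blobs
    ColorBlobLocatorProcessor.Util.filterByDensity(0.7, 1.0, blobs);         // keep mostly "solid" contours
    ColorBlobLocatorProcessor.Util.sortByArea(SortOrder.DESCENDING, blobs);  // biggest blob first (the default order)

    if (!blobs.isEmpty())
    {
        ColorBlobLocatorProcessor.Blob biggest = blobs.get(0);
        telemetry.addData("Largest blob center", "(%d, %d)",
                (int) biggest.getBoxFit().center.x, (int) biggest.getBoxFit().center.y);
    }
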
@@ -0,0 +1,136 @@
/*
 * Copyright (c) 2024 Phil Malone
 *
 * Permission is hereby granted, free of charge, to any person obtaining a copy
 * of this software and associated documentation files (the "Software"), to deal
 * in the Software without restriction, including without limitation the rights
 * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 * copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in all
 * copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
 * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 */

package org.firstinspires.ftc.robotcontroller.external.samples;

import android.graphics.Color;
import android.util.Size;

import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;

import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.vision.VisionPortal;
import org.firstinspires.ftc.vision.opencv.ImageRegion;
import org.firstinspires.ftc.vision.opencv.PredominantColorProcessor;

/*
 * This OpMode illustrates how to use a video source (camera) as a color sensor.
 *
 * A "color sensor" will typically determine the color of the object that it is pointed at.
 *
 * This sample performs the same function, except it uses a video camera to inspect an object or scene.
 * The user may choose to inspect all, or just a Region of Interest (ROI), of the active camera view.
 * The user must also provide a list of "acceptable colors" (Swatches) from which the closest matching color will be selected.
 *
 * To perform this function, a VisionPortal runs a PredominantColorProcessor process.
 * The PredominantColorProcessor process is created first, and then the VisionPortal is built to use this process.
 * The PredominantColorProcessor analyses the ROI and splits the colored pixels into several color-clusters.
 * The largest of these clusters is then considered to be the "Predominant Color".
 * The process then matches the Predominant Color with the closest Swatch and returns that match.
 *
 * To aid the user, a colored rectangle is drawn on the camera preview to show the RegionOfInterest.
 * The Predominant Color is used to paint the rectangle border, so the user can verify that the color is reasonable.
 *
 * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
 * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
 */

@Disabled
@TeleOp(name = "Concept: Vision Color-Sensor", group = "Concept")
public class ConceptVisionColorSensor extends LinearOpMode
{
    @Override
    public void runOpMode()
    {
        /* Build a "Color Sensor" vision processor based on the PredominantColorProcessor class.
         *
         * - Focus the color sensor by defining a RegionOfInterest (ROI) which you want to inspect.
         *     This can be the entire frame, or a sub-region defined using:
         *     1) standard image coordinates or 2) a normalized +/- 1.0 coordinate system.
         *     Use one form of the ImageRegion class to define the ROI.
         *         ImageRegion.entireFrame()
         *         ImageRegion.asImageCoordinates(50, 50, 150, 150)             100x100 pixel square near the upper left corner
         *         ImageRegion.asUnityCenterCoordinates(-0.1, 0.1, 0.1, -0.1)   10% width/height square centered on screen
         *
         * - Set the list of "acceptable" color swatches (matches).
         *     Only colors that you assign here will be returned.
         *     If you know the sensor will be pointing to one of a few specific colors, enter them here.
         *     Or, if the sensor may be pointed randomly, provide some additional colors that may match the surroundings.
         *     Possible choices are:
         *         RED, ORANGE, YELLOW, GREEN, CYAN, BLUE, PURPLE, MAGENTA, BLACK, WHITE.
         *
         *     Note that in the example shown below, only some of the available colors are included.
         *     This will force any other colored region into one of these colors.
         *     eg: Green may be reported as YELLOW, as this may be the "closest" match.
         */
        PredominantColorProcessor colorSensor = new PredominantColorProcessor.Builder()
                .setRoi(ImageRegion.asUnityCenterCoordinates(-0.1, 0.1, 0.1, -0.1))
                .setSwatches(
                        PredominantColorProcessor.Swatch.RED,
                        PredominantColorProcessor.Swatch.BLUE,
                        PredominantColorProcessor.Swatch.YELLOW,
                        PredominantColorProcessor.Swatch.BLACK,
                        PredominantColorProcessor.Swatch.WHITE)
                .build();

        /*
         * Build a vision portal to run the Color Sensor process.
         *
         *  - Add the colorSensor process created above.
         *  - Set the desired video resolution.
         *      Since a high resolution will not improve this process, choose a lower resolution that is
         *      supported by your camera. This will improve overall performance and reduce latency.
         *  - Choose your video source. This may be
         *      .setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"))  .....   for a webcam
         *  or
         *      .setCamera(BuiltinCameraDirection.BACK)    ... for a Phone Camera
         */
        VisionPortal portal = new VisionPortal.Builder()
                .addProcessor(colorSensor)
                .setCameraResolution(new Size(320, 240))
                .setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"))
                .build();

        telemetry.setMsTransmissionInterval(50);   // Speed up telemetry updates; just use for debugging.

        // WARNING: To be able to view the stream preview on the Driver Station, this code runs in INIT mode.
        while (opModeIsActive() || opModeInInit())
        {
            telemetry.addData("DS preview on/off", "3 dots, Camera Stream\n");

            // Request the most recent color analysis.
            // This will return the closest matching colorSwatch and the predominant RGB color.
            // Note: to take actions based on the detected color, simply use the colorSwatch in a comparison or switch.
            //  eg:
            //      if (result.closestSwatch == PredominantColorProcessor.Swatch.RED) {... some code ...}
            PredominantColorProcessor.Result result = colorSensor.getAnalysis();

            // Display the Color Sensor result.
            telemetry.addData("Best Match:", result.closestSwatch);
            telemetry.addLine(String.format("R %3d, G %3d, B %3d", Color.red(result.rgb), Color.green(result.rgb), Color.blue(result.rgb)));
            telemetry.update();

            sleep(20);
        }
    }
}
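
The inline comment in the loop above suggests branching on result.closestSwatch; a minimal sketch of that idea (the actions are placeholders, not part of this commit) might be:

    // Placeholder actions; only the Swatch comparison itself comes from the sample's comment.
    switch (result.closestSwatch)
    {
        case RED:
            telemetry.addLine("Red object detected");
            break;
        case BLUE:
            telemetry.addLine("Blue object detected");
            break;
        default:
            telemetry.addLine("No target color in view");
            break;
    }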