7 Commits

9 changed files with 477 additions and 0 deletions

View File: CometBotsCameraTest.java

@@ -0,0 +1,78 @@
package org.firstinspires.ftc.teamcode;
import android.util.Size;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import org.firstinspires.ftc.robotcore.external.hardware.camera.BuiltinCameraDirection;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.vision.VisionPortal;
import java.util.Locale;
@TeleOp(name = "DevApp - Camera Test (HDMI)", group = "SCDS Development")
public class CometBotsCameraTest extends LinearOpMode {
final boolean USING_WEBCAM = true;
final BuiltinCameraDirection INTERNAL_CAM_DIR = BuiltinCameraDirection.FRONT;
final int RESOLUTION_WIDTH = 640;
final int RESOLUTION_HEIGHT = 480;
// Internal state
boolean lastX;
int frameCount;
long capReqTime;
@Override
public void runOpMode() throws InterruptedException {
telemetry.speak("Development - Camera Test");
VisionPortal portal;
if (USING_WEBCAM)
{
portal = new VisionPortal.Builder()
.setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"))
.setCameraResolution(new Size(RESOLUTION_WIDTH, RESOLUTION_HEIGHT))
.build();
}
else
{
portal = new VisionPortal.Builder()
.setCamera(INTERNAL_CAM_DIR)
.setCameraResolution(new Size(RESOLUTION_WIDTH, RESOLUTION_HEIGHT))
.build();
}
while (!isStopRequested())
{
boolean x = gamepad1.x;
if (x && !lastX)
{
portal.saveNextFrameRaw(String.format(Locale.US, "CameraFrameCapture-%06d", frameCount++));
capReqTime = System.currentTimeMillis();
}
lastX = x;
telemetry.addLine("######## Camera Capture Utility ########");
telemetry.addLine(String.format(Locale.US, " > Resolution: %dx%d", RESOLUTION_WIDTH, RESOLUTION_HEIGHT));
telemetry.addLine(" > Press X (or Square) to capture a frame");
telemetry.addData(" > Camera Status", portal.getCameraState());
if (capReqTime != 0)
{
telemetry.addLine("\nCaptured Frame!");
}
if (capReqTime != 0 && System.currentTimeMillis() - capReqTime > 1000)
{
capReqTime = 0;
}
telemetry.update();
}
}
}

View File: CometBotsIMUTest.java

@@ -0,0 +1,72 @@
package org.firstinspires.ftc.teamcode;
import com.qualcomm.hardware.rev.RevHubOrientationOnRobot;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.IMU;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.robotcore.external.navigation.AngularVelocity;
import org.firstinspires.ftc.robotcore.external.navigation.YawPitchRollAngles;
@TeleOp(name = "DevApp - IMU Test", group = "SCDS Development")
public class CometBotsIMUTest extends LinearOpMode {
IMU imu;
@Override
public void runOpMode() throws InterruptedException {
// Retrieve and initialize the IMU.
// This sample expects the IMU to be in a REV Hub and named "imu".
imu = hardwareMap.get(IMU.class, "imu");
/* Define how the hub is mounted on the robot to get the correct Yaw, Pitch and Roll values.
*
* Two input parameters are required to fully specify the Orientation.
* The first parameter specifies the direction the printed logo on the Hub is pointing.
* The second parameter specifies the direction the USB connector on the Hub is pointing.
* All directions are relative to the robot, and left/right is as-viewed from behind the robot.
*/
/* The next two lines define Hub orientation.
* The Default Orientation (shown) is when a hub is mounted horizontally with the printed logo pointing UP and the USB port pointing FORWARD.
*
* To Do: EDIT these two lines to match YOUR mounting configuration.
*/
RevHubOrientationOnRobot.LogoFacingDirection logoDirection = RevHubOrientationOnRobot.LogoFacingDirection.UP;
RevHubOrientationOnRobot.UsbFacingDirection usbDirection = RevHubOrientationOnRobot.UsbFacingDirection.FORWARD;
RevHubOrientationOnRobot orientationOnRobot = new RevHubOrientationOnRobot(logoDirection, usbDirection);
// Now initialize the IMU with this mounting orientation
// Note: if you choose two conflicting directions, this initialization will cause a code exception.
imu.initialize(new IMU.Parameters(orientationOnRobot));
// Loop and update the dashboard
while (!isStopRequested()) {
telemetry.addData("Hub orientation", "Logo=%s USB=%s\n ", logoDirection, usbDirection);
// Check to see if heading reset is requested
if (gamepad1.y) {
telemetry.addData("Yaw", "Resetting\n");
imu.resetYaw();
} else {
telemetry.addData("Yaw", "Press Y (triangle) on Gamepad to reset\n");
}
// Retrieve Rotational Angles and Velocities
YawPitchRollAngles orientation = imu.getRobotYawPitchRollAngles();
AngularVelocity angularVelocity = imu.getRobotAngularVelocity(AngleUnit.DEGREES);
telemetry.addData("Yaw (Z)", "%.2f Deg. (Heading)", orientation.getYaw(AngleUnit.DEGREES));
telemetry.addData("Pitch (X)", "%.2f Deg.", orientation.getPitch(AngleUnit.DEGREES));
telemetry.addData("Roll (Y)", "%.2f Deg.\n", orientation.getRoll(AngleUnit.DEGREES));
telemetry.addData("Yaw (Z) velocity", "%.2f Deg/Sec", angularVelocity.zRotationRate);
telemetry.addData("Pitch (X) velocity", "%.2f Deg/Sec", angularVelocity.xRotationRate);
telemetry.addData("Roll (Y) velocity", "%.2f Deg/Sec", angularVelocity.yRotationRate);
telemetry.update();
}
}
}

View File: CometBotsOpenCVTest.java

@@ -0,0 +1,44 @@
package org.firstinspires.ftc.teamcode;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.openftc.easyopencv.OpenCvCamera;
import org.openftc.easyopencv.OpenCvCameraFactory;
import org.openftc.easyopencv.OpenCvCameraRotation;
@Autonomous(name="DevApp - OpenCV Test", group="SCDS Development")
public class CometBotsOpenCVTest extends LinearOpMode {
private OpenCvCamera controlHubCam; // Use OpenCvCamera class from FTC SDK
private static final int CAMERA_WIDTH = 1280; // width of wanted camera resolution
private static final int CAMERA_HEIGHT = 720; // height of wanted camera resolution
// Constants for estimating distance from apparent object size (pinhole camera model)
public static final double objectWidthInRealWorldUnits = 3.75; // Replace with the actual width of the object in real-world units
public static final double focalLength = 728; // Replace with the focal length of the camera in pixels
private void initOpenCV() {
// Create an instance of the camera
int cameraMonitorViewId = hardwareMap.appContext.getResources().getIdentifier(
"cameraMonitorViewId", "id", hardwareMap.appContext.getPackageName());
// Use OpenCvCameraFactory class from FTC SDK to create camera instance
controlHubCam = OpenCvCameraFactory.getInstance().createWebcam(
hardwareMap.get(WebcamName.class, "Webcam 1"), cameraMonitorViewId);
controlHubCam.setPipeline(new YellowBlobDetectionPipeline());
controlHubCam.openCameraDevice();
controlHubCam.startStreaming(CAMERA_WIDTH, CAMERA_HEIGHT, OpenCvCameraRotation.UPRIGHT);
}
@Override
public void runOpMode() throws InterruptedException {
// Minimal wiring (sketch): start the camera stream, wait for START, then idle until stop is requested.
initOpenCV();
waitForStart();
while (opModeIsActive()) {
telemetry.addData("Camera", "Streaming at %dx%d", CAMERA_WIDTH, CAMERA_HEIGHT);
telemetry.update();
sleep(50);
}
controlHubCam.stopStreaming();
}
}
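The objectWidthInRealWorldUnits and focalLength constants suggest the usual pinhole-camera distance estimate: distance = (real-world width x focal length in pixels) / perceived width in pixels. A minimal sketch of such a helper, which could live inside CometBotsOpenCVTest; the pipeline that would supply the perceived width is not shown in this diff, so the parameter here is only illustrative:

    // Sketch only: estimate distance from the apparent width of a detected object.
    private static double estimateDistance(double perceivedWidthPx) {
        // Pinhole model: distance = (realWidth * focalLengthPx) / perceivedWidthPx
        return (objectWidthInRealWorldUnits * focalLength) / perceivedWidthPx;
    }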

View File: CometBotsProject.java

@@ -0,0 +1,88 @@
package org.firstinspires.ftc.teamcode;
import static org.firstinspires.ftc.teamcode.cometbots.Constants.*;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.hardware.ColorSensor;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.hardware.DistanceSensor;
import com.qualcomm.robotcore.hardware.TouchSensor;
import org.firstinspires.ftc.robotcore.external.navigation.DistanceUnit;
import org.firstinspires.ftc.teamcode.cometbots.MovementLibrary;
@Autonomous(name = "Development Class", group = "[14493] CometBots")
public class CometBotsProject extends LinearOpMode {
@Override
public void runOpMode() throws InterruptedException {
DcMotor rhw = hardwareMap.get(DcMotor.class, "right hand wheel");
DcMotor rlw = hardwareMap.get(DcMotor.class, "right leg wheel");
DcMotor lhw = hardwareMap.get(DcMotor.class, "left hand wheel");
DcMotor llw = hardwareMap.get(DcMotor.class, "left leg wheel");
TouchSensor touchSensor = hardwareMap.get(TouchSensor.class, "touchsensor");
DistanceSensor distanceSensor = hardwareMap.get(DistanceSensor.class, "distancesensor");
ColorSensor colorSensor = hardwareMap.get(ColorSensor.class, "colorsensor");
/*
Put all motors in an array for easier handling
NOTE: Placement of motor in array is important
especially when dealing with functions that
set direction
*/
DcMotor[] motors = {rhw, rlw, lhw, llw};
/*
Initialize all wheels forward using motors array
*/
MovementLibrary.setDirectionForward(motors);
/*
Initialize wheel direction with base speed
*/
MovementLibrary.setSpeed(motors, CB_BASE_SPEED);
/*
Wait for the driver to hit START in the driver hub
*/
waitForStart();
while (opModeIsActive()) {
if (touchSensor.isPressed()) {
telemetry.speak("Touch sensor is pressed");
telemetry.addData("Touch sensor pressed?", touchSensor.isPressed());
}
/*
Distance to the nearest object, in inches (range reported by the distance sensor)
*/
double distanceInInches = distanceSensor.getDistance(DistanceUnit.INCH);
telemetry.addData("Distance to object (inches)", "%.2f", distanceInInches);
/*
Capture color readout from color sensor
*/
telemetry.addData("Color Sensor [Red]", colorSensor.red());
telemetry.addData("Color Sensor [Green]", colorSensor.green());
telemetry.addData("Color Sensor [Blue]", colorSensor.blue());
telemetry.addData("Color Sensor [Alpha]", colorSensor.alpha());
/*
Speed up when going over an object that is blue-ish / green
*/
if (colorSensor.blue() > 800 && colorSensor.green() > 1100) {
telemetry.speak("Green speed boost");
MovementLibrary.setSpeed(motors, CB_BASE_SPEED * 2.0);
} else {
MovementLibrary.setSpeed(motors, CB_BASE_SPEED);
}
telemetry.update();
}
}
}

View File: ConceptTensorFlowObjectDetectionEasy.java

@@ -0,0 +1,142 @@
/* Copyright (c) 2019 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.teamcode;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import org.firstinspires.ftc.robotcore.external.hardware.camera.BuiltinCameraDirection;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.robotcore.external.tfod.Recognition;
import org.firstinspires.ftc.vision.VisionPortal;
import org.firstinspires.ftc.vision.tfod.TfodProcessor;
import java.util.List;
/*
* This OpMode illustrates the basics of TensorFlow Object Detection, using
* the easiest way.
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
*/
@TeleOp(name = "Concept: TensorFlow Object Detection Easy", group = "Concept")
public class ConceptTensorFlowObjectDetectionEasy extends LinearOpMode {
private static final boolean USE_WEBCAM = true; // true for webcam, false for phone camera
/**
* The variable to store our instance of the TensorFlow Object Detection processor.
*/
private TfodProcessor tfod;
/**
* The variable to store our instance of the vision portal.
*/
private VisionPortal visionPortal;
@Override
public void runOpMode() {
initTfod();
// Wait for the DS start button to be touched.
telemetry.addData("DS preview on/off", "3 dots, Camera Stream");
telemetry.addData(">", "Touch Play to start OpMode");
telemetry.update();
waitForStart();
if (opModeIsActive()) {
while (opModeIsActive()) {
telemetryTfod();
// Push telemetry to the Driver Station.
telemetry.update();
// Save CPU resources; can resume streaming when needed.
if (gamepad1.dpad_down) {
visionPortal.stopStreaming();
} else if (gamepad1.dpad_up) {
visionPortal.resumeStreaming();
}
// Share the CPU.
sleep(20);
}
}
// Save more CPU resources when camera is no longer needed.
visionPortal.close();
} // end runOpMode()
/**
* Initialize the TensorFlow Object Detection processor.
*/
private void initTfod() {
// Create the TensorFlow processor the easy way.
tfod = TfodProcessor.easyCreateWithDefaults();
// Create the vision portal the easy way.
if (USE_WEBCAM) {
visionPortal = VisionPortal.easyCreateWithDefaults(
hardwareMap.get(WebcamName.class, "Webcam 1"), tfod);
} else {
visionPortal = VisionPortal.easyCreateWithDefaults(
BuiltinCameraDirection.BACK, tfod);
}
} // end method initTfod()
/**
* Add telemetry about TensorFlow Object Detection (TFOD) recognitions.
*/
private void telemetryTfod() {
List<Recognition> currentRecognitions = tfod.getRecognitions();
telemetry.addData("# Objects Detected", currentRecognitions.size());
// Step through the list of recognitions and display info for each one.
for (Recognition recognition : currentRecognitions) {
double x = (recognition.getLeft() + recognition.getRight()) / 2 ;
double y = (recognition.getTop() + recognition.getBottom()) / 2 ;
telemetry.addData(""," ");
telemetry.addData("Image", "%s (%.0f %% Conf.)", recognition.getLabel(), recognition.getConfidence() * 100);
telemetry.addData("- Position", "%.0f / %.0f", x, y);
telemetry.addData("- Size", "%.0f x %.0f", recognition.getWidth(), recognition.getHeight());
} // end for() loop
} // end method telemetryTfod()
} // end class

View File: Constants.java

@@ -0,0 +1,7 @@
package org.firstinspires.ftc.teamcode.cometbots;
public class Constants {
public static double CB_BASE_SPEED = 0.2;
}

View File: MovementLibrary.java

@@ -0,0 +1,40 @@
package org.firstinspires.ftc.teamcode.cometbots;
import com.qualcomm.robotcore.hardware.DcMotor;
/*
Drive helpers for a four-motor chassis.
All methods expect the motors array in the same order it is built in CometBotsProject:
{right hand wheel, right leg wheel, left hand wheel, left leg wheel}.
*/
public class MovementLibrary {
public static void setDirectionForward(DcMotor[] motors) {
motors[0].setDirection(DcMotor.Direction.REVERSE);
motors[1].setDirection(DcMotor.Direction.FORWARD);
motors[2].setDirection(DcMotor.Direction.FORWARD);
motors[3].setDirection(DcMotor.Direction.REVERSE);
}
public static void setDirectionBackward(DcMotor[] motors) {
motors[0].setDirection(DcMotor.Direction.FORWARD);
motors[1].setDirection(DcMotor.Direction.REVERSE);
motors[2].setDirection(DcMotor.Direction.REVERSE);
motors[3].setDirection(DcMotor.Direction.FORWARD);
}
public static void setSpeed(DcMotor[] motors, double speedValue) {
for (DcMotor motor : motors) {
motor.setPower(speedValue);
}
}
public static void setDirectionRight(DcMotor[] motors) {
motors[0].setDirection(DcMotor.Direction.REVERSE);
motors[1].setDirection(DcMotor.Direction.REVERSE);
motors[2].setDirection(DcMotor.Direction.FORWARD);
motors[3].setDirection(DcMotor.Direction.FORWARD);
}
public static void setDirectionLeft(DcMotor[] motors) {
motors[0].setDirection(DcMotor.Direction.FORWARD);
motors[1].setDirection(DcMotor.Direction.FORWARD);
motors[2].setDirection(DcMotor.Direction.REVERSE);
motors[3].setDirection(DcMotor.Direction.REVERSE);
}
}
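A short usage sketch, assuming it runs inside a LinearOpMode's runOpMode() after waitForStart(), with the same motor names as CometBotsProject and CB_BASE_SPEED from Constants:

    DcMotor[] motors = {
        hardwareMap.get(DcMotor.class, "right hand wheel"),
        hardwareMap.get(DcMotor.class, "right leg wheel"),
        hardwareMap.get(DcMotor.class, "left hand wheel"),
        hardwareMap.get(DcMotor.class, "left leg wheel")
    };
    MovementLibrary.setDirectionRight(motors);       // move right, per the library's naming
    MovementLibrary.setSpeed(motors, CB_BASE_SPEED); // drive at the base speed
    sleep(1000);                                     // LinearOpMode helper: run for ~1 second
    MovementLibrary.setSpeed(motors, 0);             // stop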

View File

@@ -0,0 +1,4 @@
https://www.youtube.com/watch?v=aeMWWvteF2U
https://www.youtube.com/watch?v=1xdo8QP6_mI

View File: build.gradle

@@ -1,6 +1,7 @@
repositories {
mavenCentral()
google() // Needed for androidx
maven { url = 'https://maven.brott.dev/' }
}
dependencies {
@@ -17,5 +18,6 @@ dependencies {
implementation 'org.tensorflow:tensorflow-lite-task-vision:0.4.3'
runtimeOnly 'org.tensorflow:tensorflow-lite:2.12.0'
implementation 'androidx.appcompat:appcompat:1.2.0'
implementation 'com.acmerobotics.dashboard:dashboard:0.4.15' // FTC Dashboard
}
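The maven.brott.dev repository and the com.acmerobotics.dashboard dependency add FTC Dashboard. A minimal sketch of pushing values to the dashboard once the dependency is in place, assuming the standard FtcDashboard API; nothing below appears in this diff:

    import com.acmerobotics.dashboard.FtcDashboard;
    import com.acmerobotics.dashboard.telemetry.TelemetryPacket;

    // Inside an OpMode loop:
    FtcDashboard dashboard = FtcDashboard.getInstance();
    TelemetryPacket packet = new TelemetryPacket();
    packet.put("yaw (deg)", orientation.getYaw(AngleUnit.DEGREES)); // e.g. the heading from CometBotsIMUTest
    dashboard.sendTelemetryPacket(packet);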