9 Commits

14 changed files with 607 additions and 11 deletions

View File

@ -1,8 +1,8 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:versionCode="52"
android:versionName="9.0.1">
android:versionCode="53"
android:versionName="9.1">
<uses-permission android:name="android.permission.RECEIVE_BOOT_COMPLETED" />

View File

@ -29,6 +29,7 @@
package org.firstinspires.ftc.robotcontroller.external.samples;
import android.util.Size;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;

View File

@ -29,6 +29,7 @@
package org.firstinspires.ftc.robotcontroller.external.samples;
import android.util.Size;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;

View File

@ -0,0 +1,78 @@
/* Copyright (c) 2024 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.robotcontroller.external.samples;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.DigitalChannel;
/*
* This OpMode demonstrates how to use a digital channel.
*
* The OpMode assumes that the digital channel is configured with a name of "digitalTouch".
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
*/
@TeleOp(name = "Sensor: digital channel", group = "Sensor")
@Disabled
public class SensorDigitalTouch extends LinearOpMode {

    // Digital channel object; configured on the robot with the name "digitalTouch".
    DigitalChannel digitalTouch;

    @Override
    public void runOpMode() {
        // Get a reference to our touch sensor's channel and configure it as an input.
        digitalTouch = hardwareMap.get(DigitalChannel.class, "digitalTouch");
        digitalTouch.setMode(DigitalChannel.Mode.INPUT);

        telemetry.addData("DigitalTouchSensorExample", "Press start to continue...");
        telemetry.update();

        // Wait for the start button to be pressed.
        waitForStart();

        // While the OpMode is active, loop and read the digital channel.
        // Note we use opModeIsActive() as our loop condition because it is an interruptible method.
        while (opModeIsActive()) {
            // The button is pressed when the channel reads LOW (false).
            // (Was `getState() == false`; direct negation is the idiomatic form.)
            if (!digitalTouch.getState()) {
                telemetry.addData("Button", "PRESSED");
            } else {
                telemetry.addData("Button", "NOT PRESSED");
            }
            telemetry.update();
        }
    }
}

View File

@ -59,6 +59,45 @@ The readme.md file located in the [/TeamCode/src/main/java/org/firstinspires/ftc
# Release Information
## Version 9.1 (20240215-115542)
### Enhancements
* Fixes a problem with Blocks: if the user closes a Block's warning balloon, it will still be closed next time the project is opened in the Blocks editor.
* In the Blocks editor, an alert concerning missing hardware devices is not shown if all the Blocks that use the missing hardware devices are disabled.
* Adds Blocks to support comparing property values CRServo.Direction, DCMotor.Direction, DCMotor.Mode, DCMotor.ZeroPowerBehavior, DigitalChannel.Mode, GyroSensor.HeadingMode, IrSeekerSensor.Mode, and Servo.Direction, to the corresponding enum Block.
* Improves OnBotJava auto-import to correctly import classes when used in certain situations.
* Improves OnBotJava autocomplete to provide better completion options in most cases.
* This fixes an issue where autocomplete would fail if a method with two or more formal parameters was defined.
* In OnBotJava, code folding support was added to expand and collapse code sections
* In OnBotJava, the copyright header is now automatically collapsed when loading new files
* For all Blocks OpMode samples, intro comments have been moved to the RunOpMode comment balloon.
* The Clean up Blocks command in the Blocks editor now positions function Blocks so their comment balloons don't overlap other function Blocks.
* Added Blocks OpMode sample SensorTouch.
* Added Java OpMode sample SensorDigitalTouch.
* Several improvements to VisionPortal
* Adds option to control whether the stream is automatically started following a `.build()` call on a VisionPortal Builder
* Adds option to control whether the vision processing statistics overlay is rendered or not
* VisionPortals now implement the `CameraStreamSource` interface, allowing multiportal users to select which portal is routed to the DS in INIT by calling CameraStreamServer.getInstance().setSource(visionPortal). Can be selected via gamepad, between Camera Stream sessions.
* Add option to `AprilTagProcessor` to suppress calibration warnings
* Improves camera calibration warnings
* If a calibration is scaled, the resolution it was scaled from will be listed
* If calibrations exist with the wrong aspect ratio, the calibrated resolutions will be listed
* Fixes race condition which caused app crash when calling `stopStreaming()` immediately followed by `close()` on a VisionPortal
* Fixes IllegalStateException when calling `stopStreaming()` immediately after building a VisionPortal
* Added FTC Blocks counterparts to new Java methods:
* VisionPortal.Builder.setAutoStartStreamOnBuild
* VisionPortal.Builder.setShowStatsOverlay
* AprilTagProcessor.Builder.setSuppressCalibrationWarnings
* CameraStreamServer.setSource
### Bug Fixes
* Fixes a problem where OnBotJava does not apply font size settings to the editor.
* Updates EasyOpenCV dependency to v1.7.1
* Fixes inability to use EasyOpenCV CameraFactory in OnBotJava
* Fixes entire RC app crash when user pipeline throws an exception
* Fixes entire RC app crash when a user canvas annotator throws an exception
* Use the modern stacktrace display when handling user exceptions instead of the legacy ESTOP telemetry message
## Version 9.0.1 (20230929-083754)
### Enhancements

View File

@ -0,0 +1,78 @@
package org.firstinspires.ftc.teamcode;
import android.util.Size;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import org.firstinspires.ftc.robotcore.external.hardware.camera.BuiltinCameraDirection;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.vision.VisionPortal;
import java.util.Locale;
@TeleOp(name = "DevApp - Camera Test (HDMI)", group = "SCDS Development")
public class CometBotsCameraTest extends LinearOpMode {

    final boolean USING_WEBCAM = true;
    final BuiltinCameraDirection INTERNAL_CAM_DIR = BuiltinCameraDirection.FRONT;
    final int RESOLUTION_WIDTH = 640;
    final int RESOLUTION_HEIGHT = 480;

    // Internal state
    boolean lastX;        // previous loop's X-button reading, for edge detection
    int frameCount;       // sequence number appended to capture filenames
    long capReqTime;      // wall-clock time of the last capture request, 0 = none pending

    @Override
    public void runOpMode() throws InterruptedException {
        telemetry.speak("Development - Camera Test");

        // Build a vision portal against either the webcam or the built-in camera.
        VisionPortal.Builder builder = new VisionPortal.Builder()
                .setCameraResolution(new Size(RESOLUTION_WIDTH, RESOLUTION_HEIGHT));
        if (USING_WEBCAM) {
            builder.setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"));
        } else {
            builder.setCamera(INTERNAL_CAM_DIR);
        }
        VisionPortal portal = builder.build();

        while (!isStopRequested()) {
            boolean xNow = gamepad1.x;

            // Rising edge on X requests a raw frame capture.
            if (xNow && !lastX) {
                portal.saveNextFrameRaw(String.format(Locale.US, "CameraFrameCapture-%06d", frameCount++));
                capReqTime = System.currentTimeMillis();
            }
            lastX = xNow;

            telemetry.addLine("######## Camera Capture Utility ########");
            telemetry.addLine(String.format(Locale.US, " > Resolution: %dx%d", RESOLUTION_WIDTH, RESOLUTION_HEIGHT));
            telemetry.addLine(" > Press X (or Square) to capture a frame");
            telemetry.addData(" > Camera Status", portal.getCameraState());

            // Show the confirmation line for roughly one second after a capture.
            if (capReqTime != 0) {
                telemetry.addLine("\nCaptured Frame!");
                if (System.currentTimeMillis() - capReqTime > 1000) {
                    capReqTime = 0;
                }
            }

            telemetry.update();
        }
    }
}

View File

@ -0,0 +1,72 @@
package org.firstinspires.ftc.teamcode;
import com.qualcomm.hardware.rev.RevHubOrientationOnRobot;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.hardware.IMU;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.robotcore.external.navigation.AngularVelocity;
import org.firstinspires.ftc.robotcore.external.navigation.YawPitchRollAngles;
@TeleOp(name = "DevApp - IMU Test", group = "SCDS Development")
public class CometBotsIMUTest extends LinearOpMode {

    IMU imu;

    @Override
    public void runOpMode() throws InterruptedException {
        // This sample expects the IMU to live in a REV Hub configured as "imu".
        imu = hardwareMap.get(IMU.class, "imu");

        /*
         * Hub mounting orientation: the first parameter is the direction the printed
         * logo on the Hub faces, the second is the direction the USB connector points.
         * Both are relative to the robot, with left/right as viewed from behind it.
         * The defaults below describe a hub mounted flat with the logo UP and the
         * USB port FORWARD.
         *
         * To Do: EDIT these two lines to match YOUR mounting configuration.
         */
        RevHubOrientationOnRobot.LogoFacingDirection logoDirection =
                RevHubOrientationOnRobot.LogoFacingDirection.UP;
        RevHubOrientationOnRobot.UsbFacingDirection usbDirection =
                RevHubOrientationOnRobot.UsbFacingDirection.FORWARD;

        // Initialize the IMU with this mounting orientation.
        // Note: two conflicting directions will make this initialization throw.
        imu.initialize(new IMU.Parameters(new RevHubOrientationOnRobot(logoDirection, usbDirection)));

        // Loop and update the dashboard until stop is requested.
        while (!isStopRequested()) {
            telemetry.addData("Hub orientation", "Logo=%s USB=%s\n ", logoDirection, usbDirection);

            // Y (triangle) re-zeroes the heading.
            if (gamepad1.y) {
                telemetry.addData("Yaw", "Resetting\n");
                imu.resetYaw();
            } else {
                telemetry.addData("Yaw", "Press Y (triangle) on Gamepad to reset\n");
            }

            // Retrieve rotational angles and velocities and report them.
            YawPitchRollAngles angles = imu.getRobotYawPitchRollAngles();
            AngularVelocity rates = imu.getRobotAngularVelocity(AngleUnit.DEGREES);
            telemetry.addData("Yaw (Z)", "%.2f Deg. (Heading)", angles.getYaw(AngleUnit.DEGREES));
            telemetry.addData("Pitch (X)", "%.2f Deg.", angles.getPitch(AngleUnit.DEGREES));
            telemetry.addData("Roll (Y)", "%.2f Deg.\n", angles.getRoll(AngleUnit.DEGREES));
            telemetry.addData("Yaw (Z) velocity", "%.2f Deg/Sec", rates.zRotationRate);
            telemetry.addData("Pitch (X) velocity", "%.2f Deg/Sec", rates.xRotationRate);
            telemetry.addData("Roll (Y) velocity", "%.2f Deg/Sec", rates.yRotationRate);
            telemetry.update();
        }
    }
}

View File

@ -0,0 +1,44 @@
package org.firstinspires.ftc.teamcode;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.openftc.easyopencv.OpenCvCamera;
import org.openftc.easyopencv.OpenCvCameraFactory;
import org.openftc.easyopencv.OpenCvCameraRotation;
@Autonomous(name="DevApp - OpenCV Test", group="SCDS Development")
public class CometBotsOpenCVTest extends LinearOpMode {

    private OpenCvCamera controlHubCam;                   // Use OpenCvCamera class from FTC SDK
    private static final int CAMERA_WIDTH = 1280;         // width of wanted camera resolution
    private static final int CAMERA_HEIGHT = 720;         // height of wanted camera resolution

    // Distance-estimation parameters for the pipeline.
    public static final double objectWidthInRealWorldUnits = 3.75; // Replace with the actual width of the object in real-world units
    public static final double focalLength = 728;                  // Replace with the focal length of the camera in pixels

    /** Creates the webcam instance, attaches the blob pipeline, and starts streaming. */
    private void initOpenCV() {
        // Look up the live-view container id so the camera preview renders on the RC screen.
        int cameraMonitorViewId = hardwareMap.appContext.getResources().getIdentifier(
                "cameraMonitorViewId", "id", hardwareMap.appContext.getPackageName());

        // Use OpenCvCameraFactory class from FTC SDK to create camera instance.
        controlHubCam = OpenCvCameraFactory.getInstance().createWebcam(
                hardwareMap.get(WebcamName.class, "Webcam 1"), cameraMonitorViewId);

        // NOTE(review): YellowBlobDetectionPipeline is defined elsewhere in this project.
        controlHubCam.setPipeline(new YellowBlobDetectionPipeline());
        // NOTE(review): synchronous open + startStreaming; the async open-listener form is
        // generally preferred by EasyOpenCV — confirm before shipping.
        controlHubCam.openCameraDevice();
        controlHubCam.startStreaming(CAMERA_WIDTH, CAMERA_HEIGHT, OpenCvCameraRotation.UPRIGHT);
    }

    @Override
    public void runOpMode() throws InterruptedException {
        // BUG FIX: the original body was empty, so initOpenCV() was never called and the
        // camera/pipeline never ran. Start the camera, wait for START, idle until stop.
        initOpenCV();

        telemetry.addData(">", "Camera streaming; press START");
        telemetry.update();
        waitForStart();

        while (opModeIsActive()) {
            // Pipeline work happens on the camera thread; just share the CPU here.
            sleep(50);
        }

        // Release the camera when the OpMode ends.
        controlHubCam.stopStreaming();
    }
}

View File

@ -0,0 +1,88 @@
package org.firstinspires.ftc.teamcode;
import static org.firstinspires.ftc.teamcode.cometbots.Constants.*;
import com.qualcomm.robotcore.eventloop.opmode.Autonomous;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.hardware.ColorSensor;
import com.qualcomm.robotcore.hardware.DcMotor;
import com.qualcomm.robotcore.hardware.DistanceSensor;
import com.qualcomm.robotcore.hardware.TouchSensor;
import org.firstinspires.ftc.robotcore.external.navigation.DistanceUnit;
import org.firstinspires.ftc.teamcode.cometbots.MovementLibrary;
@Autonomous(name = "Development Class", group = "[14493] CometBots")
public class CometBotsProject extends LinearOpMode {

    @Override
    public void runOpMode() throws InterruptedException {
        // Drive motors, one per wheel.
        DcMotor rightHandWheel = hardwareMap.get(DcMotor.class, "right hand wheel");
        DcMotor rightLegWheel = hardwareMap.get(DcMotor.class, "right leg wheel");
        DcMotor leftHandWheel = hardwareMap.get(DcMotor.class, "left hand wheel");
        DcMotor leftLegWheel = hardwareMap.get(DcMotor.class, "left leg wheel");

        // Sensors.
        TouchSensor touch = hardwareMap.get(TouchSensor.class, "touchsensor");
        DistanceSensor distance = hardwareMap.get(DistanceSensor.class, "distancesensor");
        ColorSensor color = hardwareMap.get(ColorSensor.class, "colorsensor");

        // All motors in one array for easier handling.
        // NOTE: element order matters — MovementLibrary's direction helpers index by position.
        DcMotor[] motors = {rightHandWheel, rightLegWheel, leftHandWheel, leftLegWheel};

        // Point every wheel forward, then apply the base cruising speed.
        MovementLibrary.setDirectionForward(motors);
        MovementLibrary.setSpeed(motors, CB_BASE_SPEED);

        // Wait for the driver to hit START in the driver hub.
        waitForStart();

        while (opModeIsActive()) {
            if (touch.isPressed()) {
                telemetry.speak("Touch sensor is pressed");
                telemetry.addData("Touch sensor pressed?", touch.isPressed());
            }

            // Distance traveled in inches.
            double inches = distance.getDistance(DistanceUnit.INCH);
            telemetry.addData("Distance traveled (inches)", "%.2f", inches);

            // Capture color readout from color sensor.
            telemetry.addData("Color Sensor [Red]", color.red());
            telemetry.addData("Color Sensor [Green]", color.green());
            telemetry.addData("Color Sensor [Blue]", color.blue());
            telemetry.addData("Color Sensor [Alpha]", color.alpha());

            // Speed up when going over an object that is blue-ish / green.
            boolean boost = color.blue() > 800 && color.green() > 1100;
            if (boost) {
                telemetry.speak("Green speed boost");
                MovementLibrary.setSpeed(motors, CB_BASE_SPEED * 2.0);
            } else {
                MovementLibrary.setSpeed(motors, CB_BASE_SPEED);
            }

            telemetry.update();
        }
    }
}

View File

@ -0,0 +1,142 @@
/* Copyright (c) 2019 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.firstinspires.ftc.teamcode;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import org.firstinspires.ftc.robotcore.external.hardware.camera.BuiltinCameraDirection;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.robotcore.external.tfod.Recognition;
import org.firstinspires.ftc.vision.VisionPortal;
import org.firstinspires.ftc.vision.tfod.TfodProcessor;
import java.util.List;
/*
* This OpMode illustrates the basics of TensorFlow Object Detection, using
* the easiest way.
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
*/
@TeleOp(name = "Concept: TensorFlow Object Detection Easy", group = "Concept")
public class ConceptTensorFlowObjectDetectionEasy extends LinearOpMode {

    private static final boolean USE_WEBCAM = true;  // true for webcam, false for phone camera

    /** Our instance of the TensorFlow Object Detection processor. */
    private TfodProcessor tfod;

    /** Our instance of the vision portal. */
    private VisionPortal visionPortal;

    @Override
    public void runOpMode() {
        initTfod();

        // Wait for the DS start button to be touched.
        telemetry.addData("DS preview on/off", "3 dots, Camera Stream");
        telemetry.addData(">", "Touch Play to start OpMode");
        telemetry.update();
        waitForStart();

        while (opModeIsActive()) {
            telemetryTfod();

            // Push telemetry to the Driver Station.
            telemetry.update();

            // Save CPU resources; can resume streaming when needed.
            if (gamepad1.dpad_down) {
                visionPortal.stopStreaming();
            } else if (gamepad1.dpad_up) {
                visionPortal.resumeStreaming();
            }

            // Share the CPU.
            sleep(20);
        }

        // Save more CPU resources when camera is no longer needed.
        visionPortal.close();
    }

    /** Creates the TFOD processor and a vision portal that feeds it, the easy way. */
    private void initTfod() {
        tfod = TfodProcessor.easyCreateWithDefaults();
        visionPortal = USE_WEBCAM
                ? VisionPortal.easyCreateWithDefaults(hardwareMap.get(WebcamName.class, "Webcam 1"), tfod)
                : VisionPortal.easyCreateWithDefaults(BuiltinCameraDirection.BACK, tfod);
    }

    /** Adds telemetry about TensorFlow Object Detection (TFOD) recognitions. */
    private void telemetryTfod() {
        List<Recognition> recognitions = tfod.getRecognitions();
        telemetry.addData("# Objects Detected", recognitions.size());

        // Report the center position and size of each recognized object.
        for (Recognition rec : recognitions) {
            double centerX = (rec.getLeft() + rec.getRight()) / 2;
            double centerY = (rec.getTop() + rec.getBottom()) / 2;

            telemetry.addData(""," ");
            telemetry.addData("Image", "%s (%.0f %% Conf.)", rec.getLabel(), rec.getConfidence() * 100);
            telemetry.addData("- Position", "%.0f / %.0f", centerX, centerY);
            telemetry.addData("- Size", "%.0f x %.0f", rec.getWidth(), rec.getHeight());
        }
    }
}

View File

@ -0,0 +1,7 @@
package org.firstinspires.ftc.teamcode.cometbots;
/** Shared tuning constants for the CometBots OpModes. */
public final class Constants {

    /** Base drive motor power, passed to MovementLibrary.setSpeed by callers. */
    public static final double CB_BASE_SPEED = 0.2;

    // Utility holder — never instantiated.
    private Constants() {
    }
}

View File

@ -0,0 +1,40 @@
package org.firstinspires.ftc.teamcode.cometbots;
import com.qualcomm.robotcore.hardware.DcMotor;
/**
 * Static helpers for driving a four-wheel base as a unit.
 *
 * All methods take the motors in a fixed positional order (as built by callers):
 * [0] right hand wheel, [1] right leg wheel, [2] left hand wheel, [3] left leg wheel.
 */
public final class MovementLibrary {

    // Utility class — static methods only, never instantiated.
    private MovementLibrary() {
    }

    /** Applies the given directions to motors[0..3] in order. */
    private static void applyDirections(DcMotor[] motors,
                                        DcMotor.Direction d0, DcMotor.Direction d1,
                                        DcMotor.Direction d2, DcMotor.Direction d3) {
        motors[0].setDirection(d0);
        motors[1].setDirection(d1);
        motors[2].setDirection(d2);
        motors[3].setDirection(d3);
    }

    /** Orients all four wheels so positive power drives the robot forward. */
    public static void setDirectionForward(DcMotor[] motors) {
        applyDirections(motors,
                DcMotor.Direction.REVERSE, DcMotor.Direction.FORWARD,
                DcMotor.Direction.FORWARD, DcMotor.Direction.REVERSE);
    }

    /** Orients all four wheels so positive power drives the robot backward. */
    public static void setDirectionBackward(DcMotor[] motors) {
        applyDirections(motors,
                DcMotor.Direction.FORWARD, DcMotor.Direction.REVERSE,
                DcMotor.Direction.REVERSE, DcMotor.Direction.FORWARD);
    }

    /** Orients the wheels for a rightward maneuver. */
    public static void setDirectionRight(DcMotor[] motors) {
        applyDirections(motors,
                DcMotor.Direction.REVERSE, DcMotor.Direction.REVERSE,
                DcMotor.Direction.FORWARD, DcMotor.Direction.FORWARD);
    }

    /** Orients the wheels for a leftward maneuver. */
    public static void setDirectionLeft(DcMotor[] motors) {
        applyDirections(motors,
                DcMotor.Direction.FORWARD, DcMotor.Direction.FORWARD,
                DcMotor.Direction.REVERSE, DcMotor.Direction.REVERSE);
    }

    /** Sets every motor in the array to the same power level. */
    public static void setSpeed(DcMotor[] motors, double speedValue) {
        for (DcMotor motor : motors) {
            motor.setPower(speedValue);
        }
    }
}

View File

@ -0,0 +1,4 @@
https://www.youtube.com/watch?v=aeMWWvteF2U
https://www.youtube.com/watch?v=1xdo8QP6_mI

View File

@ -1,21 +1,23 @@
repositories {
mavenCentral()
google() // Needed for androidx
maven { url = 'https://maven.brott.dev/' }
}
dependencies {
implementation 'org.firstinspires.ftc:Inspection:9.0.1'
implementation 'org.firstinspires.ftc:Blocks:9.0.1'
implementation 'org.firstinspires.ftc:Tfod:9.0.1'
implementation 'org.firstinspires.ftc:RobotCore:9.0.1'
implementation 'org.firstinspires.ftc:RobotServer:9.0.1'
implementation 'org.firstinspires.ftc:OnBotJava:9.0.1'
implementation 'org.firstinspires.ftc:Hardware:9.0.1'
implementation 'org.firstinspires.ftc:FtcCommon:9.0.1'
implementation 'org.firstinspires.ftc:Vision:9.0.1'
implementation 'org.firstinspires.ftc:Inspection:9.1.0'
implementation 'org.firstinspires.ftc:Blocks:9.1.0'
implementation 'org.firstinspires.ftc:Tfod:9.1.0'
implementation 'org.firstinspires.ftc:RobotCore:9.1.0'
implementation 'org.firstinspires.ftc:RobotServer:9.1.0'
implementation 'org.firstinspires.ftc:OnBotJava:9.1.0'
implementation 'org.firstinspires.ftc:Hardware:9.1.0'
implementation 'org.firstinspires.ftc:FtcCommon:9.1.0'
implementation 'org.firstinspires.ftc:Vision:9.1.0'
implementation 'org.firstinspires.ftc:gameAssets-CenterStage:1.0.0'
implementation 'org.tensorflow:tensorflow-lite-task-vision:0.4.3'
runtimeOnly 'org.tensorflow:tensorflow-lite:2.12.0'
implementation 'androidx.appcompat:appcompat:1.2.0'
implementation 'com.acmerobotics.dashboard:dashboard:0.4.15' // FTC Dashboard
}