FtcRobotController v7.0

@@ -9,8 +9,9 @@ android {
     defaultConfig {
         minSdkVersion 23
         //noinspection ExpiredTargetSdkVersion
         targetSdkVersion 28
-        buildConfigField "String", "BUILD_TIME", '"' + (new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ", Locale.ROOT).format(new Date())) + '"'
+        buildConfigField "String", "APP_BUILD_TIME", '"' + (new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSSZ", Locale.ROOT).format(new Date())) + '"'
     }

     compileSdkVersion 29
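The APP_BUILD_TIME value baked in above surfaces at runtime through the generated BuildConfig class. A minimal sketch of reading it back, assuming the com.qualcomm.ftcrobotcontroller application package from this build file (the class and method around the field are illustrative, not part of this commit):

    // Sketch only: reads the timestamp generated by the buildConfigField line above.
    import com.qualcomm.ftcrobotcontroller.BuildConfig;

    public class BuildStamp {
        public static String describe() {
            return "RC app built at " + BuildConfig.APP_BUILD_TIME;
        }
    }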
@@ -2,8 +2,8 @@
 <manifest xmlns:android="http://schemas.android.com/apk/res/android"
     xmlns:tools="http://schemas.android.com/tools"
     package="com.qualcomm.ftcrobotcontroller"
-    android:versionCode="40"
-    android:versionName="6.2">
+    android:versionCode="42"
+    android:versionName="7.0">

     <uses-permission android:name="android.permission.RECEIVE_BOOT_COMPLETED" />

@@ -61,6 +61,21 @@
         android:name="com.qualcomm.ftccommon.FtcRobotControllerService"
         android:enabled="true" />

+    <!-- Assistant that autostarts the robot controller on android boot (if it's supposed to) -->
+    <receiver
+        android:enabled="true"
+        android:exported="true"
+        android:name="org.firstinspires.ftc.ftccommon.internal.RunOnBoot"
+        android:permission="android.permission.RECEIVE_BOOT_COMPLETED">
+
+        <intent-filter>
+            <category android:name="android.intent.category.DEFAULT" />
+            <action android:name="android.intent.action.BOOT_COMPLETED" />
+            <action android:name="android.intent.action.QUICKBOOT_POWERON" />
+        </intent-filter>
+    </receiver>
+
 </application>

 </manifest>
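The RunOnBoot receiver named above lives inside the FTC SDK, not in this diff. For orientation, a hedged sketch of the shape such a boot receiver takes (the class name and body here are illustrative, not the SDK's actual implementation):

    import android.content.BroadcastReceiver;
    import android.content.Context;
    import android.content.Intent;

    public class BootReceiverSketch extends BroadcastReceiver {
        @Override
        public void onReceive(Context context, Intent intent) {
            // Fires for BOOT_COMPLETED / QUICKBOOT_POWERON per the intent-filter above.
            if (Intent.ACTION_BOOT_COMPLETED.equals(intent.getAction())) {
                // Decide here whether the robot controller should auto-start.
            }
        }
    }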
@@ -0,0 +1,200 @@
+package org.firstinspires.ftc.robotcontroller.external.samples;
+
+import com.qualcomm.robotcore.eventloop.opmode.Disabled;
+import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
+import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
+import com.qualcomm.robotcore.hardware.Gamepad;
+import com.qualcomm.robotcore.util.ElapsedTime;
+
+/**
+ * This sample illustrates using the rumble feature of many gamepads.
+ *
+ * Note: Some gamepads "rumble" better than others.
+ * The Xbox & PS4 have a left (rumble1) and right (rumble2) rumble motor.
+ * These two gamepads have two distinct rumble modes: Large on the left, and small on the right.
+ * The ETpark gamepad may only respond to rumble1, and may only run at full power.
+ * The Logitech F310 gamepad does not have *any* rumble ability.
+ *
+ * Moral: You should use this sample to experiment with your specific gamepads to explore their rumble features.
+ *
+ * The rumble motors are accessed through the standard gamepad1 and gamepad2 objects.
+ * Several new methods were added to the Gamepad class in FTC SDK Rev 7.
+ * The key methods are as follows:
+ *
+ * .rumble(double rumble1, double rumble2, int durationMs)
+ *     This method sets the rumble power of both motors for a specific time duration.
+ *     Both rumble arguments are motor-power levels in the 0.0 to 1.0 range.
+ *     durationMs is the desired length of the rumble action in milliseconds.
+ *     This method returns immediately.
+ *     Note:
+ *       Use a durationMs of Gamepad.RUMBLE_DURATION_CONTINUOUS to provide a continuous rumble.
+ *       Use a power of 0, or a duration of 0, to stop a rumble.
+ *
+ * .rumbleBlips(int count) allows an easy way to signal the driver with a series of rumble blips.
+ *     Just specify how many blips you want.
+ *     This method returns immediately.
+ *
+ * .runRumbleEffect(customRumbleEffect) allows you to run a custom rumble sequence that you have
+ *     built using the Gamepad.RumbleEffect.Builder().
+ *     A "custom effect" is a sequence of steps, where each step can rumble any of the
+ *     rumble motors for a specific period at a specific power level.
+ *     The custom effect will play as the (un-blocked) OpMode continues to run.
+ *
+ * .isRumbling() returns true if there is a rumble effect in progress.
+ *     Use this call to prevent stepping on a current rumble.
+ *
+ * .stopRumble() stops any ongoing rumble or custom rumble effect.
+ *
+ * .rumble(int durationMs) runs a full-power rumble for a fixed duration.
+ *
+ * Note: Whenever a new rumble command is issued, any currently executing rumble action will
+ * be truncated, and the new action started immediately. Take these precautions:
+ *     1) Do not SPAM the rumble motors by issuing rapid-fire commands.
+ *     2) Multiple sources for rumble commands must coordinate to avoid tromping on each other.
+ *
+ * This can be achieved several possible ways:
+ *     1) Only having one source for rumble actions.
+ *     2) Issuing rumble commands on transitions, rather than states.
+ *        e.g. The moment a touch sensor is pressed, rather than the entire time it is being pressed.
+ *     3) Scheduling rumble commands based on timed events. e.g. 10 seconds prior to endgame.
+ *     4) Rumbling on non-overlapping mechanical actions. e.g. arm fully-extended or fully-retracted.
+ *     5) Using isRumbling() to hold off on a new rumble if one is already in progress.
+ *
+ * The examples shown here are representative of how to invoke a gamepad rumble.
+ * It is assumed that these will be modified to suit the specific robot and team strategy needs.
+ *
+ * ######## Read the telemetry display on the Driver Station screen for instructions. ######
+ *
+ * Ex 1) This example shows a) how to create a custom rumble effect, and then b) how to trigger it based
+ *       on game time. One use for this might be to alert the driver that half-time or end-game is approaching.
+ *
+ * Ex 2) This example shows tying the rumble power to a changing sensor value.
+ *       In this case it is the gamepad trigger, but it could be any sensor output scaled to the 0-1 range.
+ *       Since it takes over the rumble motors, it is only performed when the Left Bumper is pressed.
+ *       Note that this approach MUST include a way to turn OFF the rumble when the button is released.
+ *
+ * Ex 3) This example shows a simple way to trigger a 3-blip sequence. In this case it is
+ *       triggered by the gamepad A (Cross) button, but it could be any sensor, like a touch or light sensor.
+ *       Note that this code ensures that it only rumbles once when the input goes true.
+ *
+ * Ex 4) This example shows how to trigger a single rumble when an input value goes over a certain threshold.
+ *       In this case it is reading the Right Trigger, but it could be any variable sensor, like a
+ *       range sensor or position sensor. The code needs to ensure that it is only triggered once, so
+ *       it waits till the sensor drops back below the threshold before it can trigger again.
+ *
+ * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
+ * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
+ */
+
+@Disabled
+@TeleOp(name="Concept: Gamepad Rumble", group ="Concept")
+public class ConceptGamepadRumble extends LinearOpMode
+{
+    boolean lastA = false;                      // Use to track the prior button state.
+    boolean lastLB = false;                     // Use to track the prior button state.
+    boolean highLevel = false;                  // Used to prevent multiple level-based rumbles.
+    boolean secondHalf = false;                 // Use to prevent multiple half-time warning rumbles.
+
+    Gamepad.RumbleEffect customRumbleEffect;    // Use to build a custom rumble sequence.
+    ElapsedTime runtime = new ElapsedTime();    // Use to determine when end game is starting.
+
+    final double HALF_TIME = 60.0;              // Wait this many seconds before rumble-alert for half-time.
+    final double TRIGGER_THRESHOLD = 0.75;      // Squeeze more than 3/4 to get rumble.
+
+    @Override
+    public void runOpMode()
+    {
+        // Example 1. a) start by creating a three-pulse rumble sequence: right, LEFT, LEFT
+        customRumbleEffect = new Gamepad.RumbleEffect.Builder()
+                .addStep(0.0, 1.0, 500)  //  Rumble right motor 100% for 500 mSec
+                .addStep(0.0, 0.0, 300)  //  Pause for 300 mSec
+                .addStep(1.0, 0.0, 250)  //  Rumble left motor 100% for 250 mSec
+                .addStep(0.0, 0.0, 250)  //  Pause for 250 mSec
+                .addStep(1.0, 0.0, 250)  //  Rumble left motor 100% for 250 mSec
+                .build();
+
+        telemetry.addData(">", "Press Start");
+        telemetry.update();
+
+        waitForStart();
+        runtime.reset();    // Start game timer.
+
+        // Loop while monitoring buttons for rumble triggers
+        while (opModeIsActive())
+        {
+            // Read and save the current gamepad button states.
+            boolean currentA = gamepad1.a;
+            boolean currentLB = gamepad1.left_bumper;
+
+            // Display the current rumble status. Just for interest.
+            telemetry.addData(">", "Are we RUMBLING? %s\n", gamepad1.isRumbling() ? "YES" : "no");
+
+            // ----------------------------------------------------------------------------------------
+            // Example 1. b) Watch the runtime timer, and run the custom rumble when we hit half-time.
+            //               Make sure we only signal once by setting the "secondHalf" flag to prevent further rumbles.
+            // ----------------------------------------------------------------------------------------
+            if ((runtime.seconds() > HALF_TIME) && !secondHalf) {
+                gamepad1.runRumbleEffect(customRumbleEffect);
+                secondHalf = true;
+            }
+
+            // Display the time remaining while we are still counting down.
+            if (!secondHalf) {
+                telemetry.addData(">", "Halftime Alert Countdown: %3.0f Sec \n", (HALF_TIME - runtime.seconds()));
+            }
+
+            // ----------------------------------------------------------------------------------------
+            // Example 2. If the Left Bumper is being pressed, power the rumble motors based on the two trigger depressions.
+            //            This is useful to see how the rumble feels at various power levels.
+            // ----------------------------------------------------------------------------------------
+            if (currentLB) {
+                // Left Bumper is being pressed, so send left and right "trigger" values to left and right rumble motors.
+                gamepad1.rumble(gamepad1.left_trigger, gamepad1.right_trigger, Gamepad.RUMBLE_DURATION_CONTINUOUS);
+
+                // Show what is being sent to the rumbles
+                telemetry.addData(">", "Squeeze triggers to control rumbles");
+                telemetry.addData("> : Rumble", "Left: %.0f%%   Right: %.0f%%", gamepad1.left_trigger * 100, gamepad1.right_trigger * 100);
+            } else {
+                // Make sure rumble is turned off when the Left Bumper is released (only one time each press)
+                if (lastLB) {
+                    gamepad1.stopRumble();
+                }
+
+                // Prompt for manual rumble action
+                telemetry.addData(">", "Hold Left-Bumper to test Manual Rumble");
+                telemetry.addData(">", "Press A (Cross) for three blips");
+                telemetry.addData(">", "Squeeze right trigger slowly for 1 blip");
+            }
+            lastLB = currentLB; // remember the current button state for next time around the loop
+
+            // ----------------------------------------------------------------------------------------
+            // Example 3. Blip 3 times at the moment that A (Cross) is pressed. (look for pressed transition)
+            //            BUT !!! Skip it altogether if the gamepad is already rumbling.
+            // ----------------------------------------------------------------------------------------
+            if (currentA && !lastA) {
+                if (!gamepad1.isRumbling())  // Check for possible overlap of rumbles.
+                    gamepad1.rumbleBlips(3);
+            }
+            lastA = currentA; // remember the current button state for next time around the loop
+
+            // ----------------------------------------------------------------------------------------
+            // Example 4. Rumble once when the gamepad right trigger goes above the THRESHOLD.
+            // ----------------------------------------------------------------------------------------
+            if (gamepad1.right_trigger > TRIGGER_THRESHOLD) {
+                if (!highLevel) {
+                    gamepad1.rumble(0.9, 0, 200);  // 200 mSec burst on left motor.
+                    highLevel = true;              // Hold off any more triggers
+                }
+            } else {
+                highLevel = false;                 // We can trigger again now.
+            }
+
+            // Send the telemetry data to the Driver Station, and then pause to pace the program.
+            telemetry.update();
+            sleep(10);
+        }
+    }
+}
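One call documented in the header comment above, .rumble(int durationMs), never appears in the sample body. A minimal sketch of that fixed-duration overload, usable inside any active OpMode loop (the 500 ms value is arbitrary):

    // Full-power rumble on gamepad1 for half a second; guard with isRumbling()
    // so an effect already in progress is not truncated (precaution 2 above).
    if (!gamepad1.isRumbling()) {
        gamepad1.rumble(500);
    }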
@@ -0,0 +1,78 @@
+package org.firstinspires.ftc.robotcontroller.external.samples;
+
+import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
+import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
+import com.qualcomm.robotcore.eventloop.opmode.Disabled;
+
+import org.firstinspires.ftc.robotcore.external.Telemetry;
+
+/**
+ * This sample illustrates using the touchpad feature found on some gamepads.
+ *
+ * The Sony PS4 gamepad can detect two distinct touches on the central touchpad.
+ * Other gamepads with different touchpads may provide mixed results.
+ *
+ * The touchpads are accessed through the standard gamepad1 and gamepad2 objects.
+ * Several new members were added to the Gamepad class in FTC SDK Rev 7:
+ *
+ * .touchpad_finger_1     returns true if at least one finger is detected.
+ * .touchpad_finger_1_x   finger 1 X coordinate. Valid if touchpad_finger_1 is true.
+ * .touchpad_finger_1_y   finger 1 Y coordinate. Valid if touchpad_finger_1 is true.
+ *
+ * .touchpad_finger_2     returns true if a second finger is detected.
+ * .touchpad_finger_2_x   finger 2 X coordinate. Valid if touchpad_finger_2 is true.
+ * .touchpad_finger_2_y   finger 2 Y coordinate. Valid if touchpad_finger_2 is true.
+ *
+ * Finger touches are reported with an X and Y coordinate in the following coordinate system:
+ *
+ *     1) X is the horizontal axis, and Y is the vertical axis.
+ *     2) The 0,0 origin is at the center of the touchpad.
+ *     3) 1.0, 1.0 is at the top right corner of the touchpad.
+ *     4) -1.0, -1.0 is at the bottom left corner of the touchpad.
+ *
+ * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
+ * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
+ */
+
+@Disabled
+@TeleOp(name="Concept: Gamepad Touchpad", group ="Concept")
+public class ConceptGamepadTouchpad extends LinearOpMode
+{
+    @Override
+    public void runOpMode()
+    {
+        telemetry.setDisplayFormat(Telemetry.DisplayFormat.MONOSPACE);
+
+        telemetry.addData(">", "Press Start");
+        telemetry.update();
+
+        waitForStart();
+
+        while (opModeIsActive())
+        {
+            boolean finger = false;
+
+            // Display finger 1 x & y position if finger detected
+            if (gamepad1.touchpad_finger_1)
+            {
+                finger = true;
+                telemetry.addLine(String.format("Finger 1: x=%5.2f y=%5.2f\n", gamepad1.touchpad_finger_1_x, gamepad1.touchpad_finger_1_y));
+            }
+
+            // Display finger 2 x & y position if finger detected
+            if (gamepad1.touchpad_finger_2)
+            {
+                finger = true;
+                telemetry.addLine(String.format("Finger 2: x=%5.2f y=%5.2f\n", gamepad1.touchpad_finger_2_x, gamepad1.touchpad_finger_2_y));
+            }
+
+            if (!finger)
+            {
+                telemetry.addLine("No fingers");
+            }
+
+            telemetry.update();
+            sleep(10);
+        }
+    }
+}
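Because both fingers are reported in the same normalized coordinate frame, a pinch distance falls out directly. A small sketch built only on the fields documented above (the telemetry caption is illustrative):

    // Distance between the two touches, in normalized pad units (corners at +/-1).
    if (gamepad1.touchpad_finger_1 && gamepad1.touchpad_finger_2) {
        double dx = gamepad1.touchpad_finger_1_x - gamepad1.touchpad_finger_2_x;
        double dy = gamepad1.touchpad_finger_1_y - gamepad1.touchpad_finger_2_y;
        telemetry.addData("Pinch", "%5.2f", Math.hypot(dx, dy));
    }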
@@ -41,7 +41,7 @@ import org.firstinspires.ftc.robotcore.external.tfod.Recognition;

 /**
  * This 2020-2021 OpMode illustrates the basics of using the TensorFlow Object Detection API to
- * determine the position of the Ultimate Goal game elements.
+ * determine the position of the Freight Frenzy game elements.
  *
  * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
  * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
@@ -52,9 +52,24 @@ import org.firstinspires.ftc.robotcore.external.tfod.Recognition;
 @TeleOp(name = "Concept: TensorFlow Object Detection", group = "Concept")
 @Disabled
 public class ConceptTensorFlowObjectDetection extends LinearOpMode {
-    private static final String TFOD_MODEL_ASSET = "UltimateGoal.tflite";
-    private static final String LABEL_FIRST_ELEMENT = "Quad";
-    private static final String LABEL_SECOND_ELEMENT = "Single";
+    /* Note: This sample uses the all-objects TensorFlow model (FreightFrenzy_BCDM.tflite), which contains
+     * the following 4 detectable objects:
+     *   0: Ball,
+     *   1: Cube,
+     *   2: Duck,
+     *   3: Marker (duck location tape marker)
+     *
+     * Two additional model assets are available which only contain a subset of the objects:
+     *   FreightFrenzy_BC.tflite  0: Ball,  1: Cube
+     *   FreightFrenzy_DM.tflite  0: Duck,  1: Marker
+     */
+    private static final String TFOD_MODEL_ASSET = "FreightFrenzy_BCDM.tflite";
+    private static final String[] LABELS = {
+        "Ball",
+        "Cube",
+        "Duck",
+        "Marker"
+    };

     /*
      * IMPORTANT: You need to obtain your own license key to use Vuforia. The string below with which
@@ -128,16 +143,13 @@ public class ConceptTensorFlowObjectDetection extends LinearOpMode {
                             recognition.getLeft(), recognition.getTop());
                     telemetry.addData(String.format("  right,bottom (%d)", i), "%.03f , %.03f",
                             recognition.getRight(), recognition.getBottom());
                     i++;
                 }
                 telemetry.update();
             }
         }
     }
 }

         if (tfod != null) {
             tfod.shutdown();
         }
     }

     /**
@@ -166,7 +178,9 @@ public class ConceptTensorFlowObjectDetection extends LinearOpMode {
                 "tfodMonitorViewId", "id", hardwareMap.appContext.getPackageName());
         TFObjectDetector.Parameters tfodParameters = new TFObjectDetector.Parameters(tfodMonitorViewId);
         tfodParameters.minResultConfidence = 0.8f;
+        tfodParameters.isModelTensorFlow2 = true;
+        tfodParameters.inputSize = 320;
         tfod = ClassFactory.getInstance().createTFObjectDetector(tfodParameters, vuforia);
-        tfod.loadModelFromAsset(TFOD_MODEL_ASSET, LABEL_FIRST_ELEMENT, LABEL_SECOND_ELEMENT);
+        tfod.loadModelFromAsset(TFOD_MODEL_ASSET, LABELS);
     }
 }
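With the model swap, code that previously branched on the "Quad"/"Single" labels now matches against the strings in LABELS. A hedged sketch of filtering for a Duck, using only calls visible in this sample (tfod.getUpdatedRecognitions() comes from the surrounding sample body; the center-X telemetry is illustrative):

    List<Recognition> updatedRecognitions = tfod.getUpdatedRecognitions();
    if (updatedRecognitions != null) {
        for (Recognition recognition : updatedRecognitions) {
            if ("Duck".equals(recognition.getLabel())) {
                // Horizontal center of the detection, e.g. for barcode-position logic.
                double centerX = (recognition.getLeft() + recognition.getRight()) / 2.0;
                telemetry.addData("Duck center X", "%.0f px", centerX);
            }
        }
    }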
@@ -42,7 +42,7 @@ import org.firstinspires.ftc.robotcore.external.tfod.Recognition;

 /**
  * This 2020-2021 OpMode illustrates the basics of using the TensorFlow Object Detection API to
- * determine the position of the Ultimate Goal game elements.
+ * determine the position of the Freight Frenzy game elements.
  *
  * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
  * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
@@ -53,9 +53,24 @@ import org.firstinspires.ftc.robotcore.external.tfod.Recognition;
 @TeleOp(name = "Concept: TensorFlow Object Detection Switchable Cameras", group = "Concept")
 @Disabled
 public class ConceptTensorFlowObjectDetectionSwitchableCameras extends LinearOpMode {
-    private static final String TFOD_MODEL_ASSET = "UltimateGoal.tflite";
-    private static final String LABEL_FIRST_ELEMENT = "Quad";
-    private static final String LABEL_SECOND_ELEMENT = "Single";
+    /* Note: This sample uses the all-objects TensorFlow model (FreightFrenzy_BCDM.tflite), which contains
+     * the following 4 detectable objects:
+     *   0: Ball,
+     *   1: Cube,
+     *   2: Duck,
+     *   3: Marker (duck location tape marker)
+     *
+     * Two additional model assets are available which only contain a subset of the objects:
+     *   FreightFrenzy_BC.tflite  0: Ball,  1: Cube
+     *   FreightFrenzy_DM.tflite  0: Duck,  1: Marker
+     */
+    private static final String TFOD_MODEL_ASSET = "FreightFrenzy_BCDM.tflite";
+    private static final String[] LABELS = {
+        "Ball",
+        "Cube",
+        "Duck",
+        "Marker"
+    };

     /*
      * IMPORTANT: You need to obtain your own license key to use Vuforia. The string below with which
@@ -134,15 +149,12 @@ public class ConceptTensorFlowObjectDetectionSwitchableCameras extends LinearOpMode {
                             recognition.getLeft(), recognition.getTop());
                     telemetry.addData(String.format("  right,bottom (%d)", i), "%.03f , %.03f",
                             recognition.getRight(), recognition.getBottom());
                     i++;
                 }
                 telemetry.update();
             }
         }
     }

         if (tfod != null) {
             tfod.shutdown();
         }
     }

     /**
@@ -179,8 +191,10 @@ public class ConceptTensorFlowObjectDetectionSwitchableCameras extends LinearOpMode {
                 "tfodMonitorViewId", "id", hardwareMap.appContext.getPackageName());
         TFObjectDetector.Parameters tfodParameters = new TFObjectDetector.Parameters(tfodMonitorViewId);
         tfodParameters.minResultConfidence = 0.8f;
+        tfodParameters.isModelTensorFlow2 = true;
+        tfodParameters.inputSize = 320;
         tfod = ClassFactory.getInstance().createTFObjectDetector(tfodParameters, vuforia);
-        tfod.loadModelFromAsset(TFOD_MODEL_ASSET, LABEL_FIRST_ELEMENT, LABEL_SECOND_ELEMENT);
+        tfod.loadModelFromAsset(TFOD_MODEL_ASSET, LABELS);
     }

     private void doCameraSwitching() {
@@ -41,7 +41,7 @@ import org.firstinspires.ftc.robotcore.external.tfod.Recognition;

 /**
  * This 2020-2021 OpMode illustrates the basics of using the TensorFlow Object Detection API to
- * determine the position of the Ultimate Goal game elements.
+ * determine the position of the Freight Frenzy game elements.
  *
  * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
  * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
@@ -52,9 +52,24 @@ import org.firstinspires.ftc.robotcore.external.tfod.Recognition;
 @TeleOp(name = "Concept: TensorFlow Object Detection Webcam", group = "Concept")
 @Disabled
 public class ConceptTensorFlowObjectDetectionWebcam extends LinearOpMode {
-    private static final String TFOD_MODEL_ASSET = "UltimateGoal.tflite";
-    private static final String LABEL_FIRST_ELEMENT = "Quad";
-    private static final String LABEL_SECOND_ELEMENT = "Single";
+    /* Note: This sample uses the all-objects TensorFlow model (FreightFrenzy_BCDM.tflite), which contains
+     * the following 4 detectable objects:
+     *   0: Ball,
+     *   1: Cube,
+     *   2: Duck,
+     *   3: Marker (duck location tape marker)
+     *
+     * Two additional model assets are available which only contain a subset of the objects:
+     *   FreightFrenzy_BC.tflite  0: Ball,  1: Cube
+     *   FreightFrenzy_DM.tflite  0: Duck,  1: Marker
+     */
+    private static final String TFOD_MODEL_ASSET = "FreightFrenzy_BCDM.tflite";
+    private static final String[] LABELS = {
+        "Ball",
+        "Cube",
+        "Duck",
+        "Marker"
+    };

     /*
      * IMPORTANT: You need to obtain your own license key to use Vuforia. The string below with which
@@ -127,16 +142,13 @@ public class ConceptTensorFlowObjectDetectionWebcam extends LinearOpMode {
                             recognition.getLeft(), recognition.getTop());
                     telemetry.addData(String.format("  right,bottom (%d)", i), "%.03f , %.03f",
                             recognition.getRight(), recognition.getBottom());
                     i++;
                 }
                 telemetry.update();
             }
         }
     }
 }

         if (tfod != null) {
             tfod.shutdown();
         }
     }

     /**
@@ -165,7 +177,9 @@ public class ConceptTensorFlowObjectDetectionWebcam extends LinearOpMode {
                 "tfodMonitorViewId", "id", hardwareMap.appContext.getPackageName());
         TFObjectDetector.Parameters tfodParameters = new TFObjectDetector.Parameters(tfodMonitorViewId);
         tfodParameters.minResultConfidence = 0.8f;
+        tfodParameters.isModelTensorFlow2 = true;
+        tfodParameters.inputSize = 320;
         tfod = ClassFactory.getInstance().createTFObjectDetector(tfodParameters, vuforia);
-        tfod.loadModelFromAsset(TFOD_MODEL_ASSET, LABEL_FIRST_ELEMENT, LABEL_SECOND_ELEMENT);
+        tfod.loadModelFromAsset(TFOD_MODEL_ASSET, LABELS);
     }
 }
@@ -0,0 +1,202 @@
+package org.firstinspires.ftc.robotcontroller.external.samples;
+
+import com.qualcomm.robotcore.eventloop.opmode.Disabled;
+import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
+import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
+import com.qualcomm.robotcore.hardware.DcMotor;
+import com.qualcomm.robotcore.util.Range;
+
+import org.firstinspires.ftc.robotcore.external.ClassFactory;
+import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
+import org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix;
+import org.firstinspires.ftc.robotcore.external.matrices.VectorF;
+import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer;
+import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackable;
+import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackableDefaultListener;
+import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackables;
+
+/**
+ * This OpMode illustrates using a webcam to locate and drive towards ANY Vuforia target.
+ * The code assumes a basic two-wheel robot configuration with motors named left_drive and right_drive.
+ * The motor directions must be set so a positive drive goes forward and a positive turn rotates to the right.
+ *
+ * Under manual control, the left stick will move forward/back, and the right stick will turn left/right.
+ * This is called POV Joystick mode, different from Tank Drive (where each joystick controls a wheel).
+ * Manually drive the robot until it displays Target data on the Driver Station.
+ * Press and hold the *Left Bumper* to enable the automatic "Drive to target" mode.
+ * Release the Left Bumper to return to manual driving mode.
+ *
+ * Use DESIRED_DISTANCE to set how close you want the robot to get to the target.
+ * Speed and Turn sensitivity can be adjusted using the SPEED_GAIN and TURN_GAIN constants.
+ *
+ * For more Vuforia details, or to adapt this OpMode for a phone camera, view the
+ * ConceptVuforiaFieldNavigation and ConceptVuforiaFieldNavigationWebcam samples.
+ *
+ * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
+ * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
+ *
+ * IMPORTANT: In order to use this OpMode, you need to obtain your own Vuforia license key, as
+ * is explained below.
+ */
+
+@TeleOp(name="Drive To Target", group = "Concept")
+@Disabled
+public class ConceptVuforiaDriveToTargetWebcam extends LinearOpMode
+{
+    // Adjust these numbers to suit your robot.
+    final double DESIRED_DISTANCE = 8.0; //  this is how close the camera should get to the target (inches)
+
+    // The GAIN constants set the relationship between the measured position error
+    // and how much power is applied to the drive motors. Drive = Error * Gain.
+    // Make these values smaller for smoother control.
+    final double SPEED_GAIN = 0.02;  // Speed Control "Gain". e.g. Ramp up to 50% power at a 25 inch error. (0.50 / 25.0)
+    final double TURN_GAIN  = 0.01;  // Turn Control "Gain".  e.g. Ramp up to 25% power at a 25 degree error. (0.25 / 25.0)
+
+    final double MM_PER_INCH = 25.40;  //  Metric conversion
+
+    /*
+     * IMPORTANT: You need to obtain your own license key to use Vuforia. The string below with which
+     * 'parameters.vuforiaLicenseKey' is initialized is for illustration only, and will not function.
+     * A Vuforia 'Development' license key can be obtained free of charge from the Vuforia developer
+     * web site at https://developer.vuforia.com/license-manager.
+     *
+     * Vuforia license keys are always 380 characters long, and look as if they contain mostly
+     * random data. As an example, here is an example of a fragment of a valid key:
+     *      ... yIgIzTqZ4mWjk9wd3cZO9T1axEqzuhxoGlfOOI2dRzKS4T0hQ8kT ...
+     * Once you've obtained a license key, copy the string from the Vuforia web site
+     * and paste it in to your code on the next line, between the double quotes.
+     */
+    private static final String VUFORIA_KEY =
+            " --- YOUR NEW VUFORIA KEY GOES HERE --- ";
+
+    VuforiaLocalizer vuforia = null;
+    OpenGLMatrix targetPose  = null;
+    String targetName        = "";
+
+    private DcMotor leftDrive  = null;
+    private DcMotor rightDrive = null;
+
+    @Override public void runOpMode()
+    {
+        /*
+         * Configure Vuforia by creating a Parameter object, and passing it to the Vuforia engine.
+         * To get an on-phone camera preview, use the code below.
+         * If no camera preview is desired, use the parameter-less constructor instead (commented out below).
+         */
+        int cameraMonitorViewId = hardwareMap.appContext.getResources().getIdentifier("cameraMonitorViewId", "id", hardwareMap.appContext.getPackageName());
+        VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(cameraMonitorViewId);
+        // VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters();
+
+        parameters.vuforiaLicenseKey = VUFORIA_KEY;
+
+        // Turn off Extended tracking. Set this true if you want Vuforia to track beyond the target.
+        parameters.useExtendedTracking = false;
+
+        // Connect to the camera we are to use. This name must match what is set up in Robot Configuration.
+        parameters.cameraName = hardwareMap.get(WebcamName.class, "Webcam 1");
+        this.vuforia = ClassFactory.getInstance().createVuforia(parameters);
+
+        // Load the trackable objects from the Assets file, and give them meaningful names
+        VuforiaTrackables targetsFreightFrenzy = this.vuforia.loadTrackablesFromAsset("FreightFrenzy");
+        targetsFreightFrenzy.get(0).setName("Blue Storage");
+        targetsFreightFrenzy.get(1).setName("Blue Alliance Wall");
+        targetsFreightFrenzy.get(2).setName("Red Storage");
+        targetsFreightFrenzy.get(3).setName("Red Alliance Wall");
+
+        // Start tracking targets in the background
+        targetsFreightFrenzy.activate();
+
+        // Initialize the hardware variables. Note that the strings used here as parameters
+        // to 'get' must correspond to the names assigned during the robot configuration
+        // step (using the FTC Robot Controller app on the phone).
+        leftDrive  = hardwareMap.get(DcMotor.class, "left_drive");
+        rightDrive = hardwareMap.get(DcMotor.class, "right_drive");
+
+        // To drive forward, most robots need the motor on one side to be reversed, because the axles point in opposite directions.
+        // Pushing the left stick forward MUST make the robot go forward. So adjust these two lines based on your first test drive.
+        leftDrive.setDirection(DcMotor.Direction.FORWARD);
+        rightDrive.setDirection(DcMotor.Direction.REVERSE);
+
+        telemetry.addData(">", "Press Play to start");
+        telemetry.update();
+
+        waitForStart();
+
+        boolean targetFound   = false;  // Set to true when a target is detected by Vuforia
+        double  targetRange   = 0;      // Distance from camera to target in Inches
+        double  targetBearing = 0;      // Robot Heading, relative to target. Positive degrees means target is to the right.
+        double  drive         = 0;      // Desired forward power (-1 to +1)
+        double  turn          = 0;      // Desired turning power (-1 to +1)
+
+        while (opModeIsActive())
+        {
+            // Look for the first visible target, and save its pose.
+            targetFound = false;
+            for (VuforiaTrackable trackable : targetsFreightFrenzy)
+            {
+                if (((VuforiaTrackableDefaultListener) trackable.getListener()).isVisible())
+                {
+                    targetPose = ((VuforiaTrackableDefaultListener) trackable.getListener()).getVuforiaCameraFromTarget();
+
+                    // if we have a target, process the "pose" to determine the position of the target relative to the robot.
+                    if (targetPose != null)
+                    {
+                        targetFound = true;
+                        targetName  = trackable.getName();
+                        VectorF trans = targetPose.getTranslation();
+
+                        // Extract the X & Y components of the offset of the target relative to the robot
+                        double targetX = trans.get(0) / MM_PER_INCH; // Image X axis
+                        double targetY = trans.get(2) / MM_PER_INCH; // Image Z axis
+
+                        // target range is based on distance from robot position to origin (right triangle).
+                        targetRange = Math.hypot(targetX, targetY);
+
+                        // target bearing is based on angle formed between the X axis and the target range line
+                        targetBearing = Math.toDegrees(Math.asin(targetX / targetRange));
+
+                        break;  // jump out of target tracking loop if we find a target.
+                    }
+                }
+            }
+
+            // Tell the driver what we see, and what to do.
+            if (targetFound) {
+                telemetry.addData(">", "HOLD Left-Bumper to Drive to Target\n");
+                telemetry.addData("Target", " %s", targetName);
+                telemetry.addData("Range", "%5.1f inches", targetRange);
+                telemetry.addData("Bearing", "%3.0f degrees", targetBearing);
+            } else {
+                telemetry.addData(">", "Drive using joystick to find target\n");
+            }
+
+            // Drive to target automatically if the Left Bumper is being pressed, AND we have found a target.
+            if (gamepad1.left_bumper && targetFound) {
+
+                // Determine heading and range error so we can use them to control the robot automatically.
+                double rangeError   = (targetRange - DESIRED_DISTANCE);
+                double headingError = targetBearing;
+
+                // Use the speed and turn "gains" to calculate how we want the robot to move.
+                drive = rangeError * SPEED_GAIN;
+                turn  = headingError * TURN_GAIN;
+
+                telemetry.addData("Auto", "Drive %5.2f, Turn %5.2f", drive, turn);
+            } else {
+
+                // Drive using manual POV Joystick mode.
+                drive = -gamepad1.left_stick_y / 2.0;  // Reduce drive rate to 50%.
+                turn  =  gamepad1.right_stick_x / 4.0; // Reduce turn rate to 25%.
+                telemetry.addData("Manual", "Drive %5.2f, Turn %5.2f", drive, turn);
+            }
+            telemetry.update();
+
+            // Calculate left and right wheel powers and send them to the motors.
+            double leftPower  = Range.clip(drive + turn, -1.0, 1.0);
+            double rightPower = Range.clip(drive - turn, -1.0, 1.0);
+            leftDrive.setPower(leftPower);
+            rightDrive.setPower(rightPower);
+
+            sleep(10);
+        }
+    }
+}
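The gains above are easy to sanity-check by hand: with SPEED_GAIN = 0.02, a robot 33 inches from the target has a range error of 33 - 8 = 25 inches, so drive = 25 * 0.02 = 0.50 (the 50%-at-25-inches case noted in the comment), falling linearly to zero at the 8-inch stand-off. Likewise a 25 degree bearing error yields turn = 25 * 0.01 = 0.25, and Range.clip then bounds the summed wheel powers to +/-1.0.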
@@ -52,29 +52,19 @@ import static org.firstinspires.ftc.robotcore.external.navigation.AxesReference.EXTRINSIC;
 import static org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer.CameraDirection.BACK;

 /**
- * This 2020-2021 OpMode illustrates the basics of using the Vuforia localizer to determine
- * positioning and orientation of robot on the ULTIMATE GOAL FTC field.
- * The code is structured as a LinearOpMode
+ * This OpMode illustrates using the Vuforia localizer to determine positioning and orientation of
+ * robot on the FTC field using the RC phone's camera. The code is structured as a LinearOpMode
+ *
+ * Note: If you are using a WEBCAM see ConceptVuforiaFieldNavigationWebcam.java
  *
  * When images are located, Vuforia is able to determine the position and orientation of the
  * image relative to the camera. This sample code then combines that information with a
  * knowledge of where the target images are on the field, to determine the location of the camera.
  *
  * From the Audience perspective, the Red Alliance station is on the right and the
  * Blue Alliance Station is on the left.
- * There are a total of five image targets for the ULTIMATE GOAL game.
- * Three of the targets are placed in the center of the Red Alliance, Audience (Front),
- * and Blue Alliance perimeter walls.
- * Two additional targets are placed on the perimeter wall, one in front of each Tower Goal.
- * Refer to the Field Setup manual for more specific location details
  *
- * A final calculation then uses the location of the camera on the robot to determine the
+ * Finally, the location of the camera on the robot is used to determine the
  * robot's location and orientation on the field.
  *
  * @see VuforiaLocalizer
  * @see VuforiaTrackableDefaultListener
- * see ultimategoal/doc/tutorial/FTC_FieldCoordinateSystemDefinition.pdf
+ * To learn more about the FTC field coordinate model, see FTC_FieldCoordinateSystemDefinition.pdf in this folder
  *
  * Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
  * Remove or comment out the @Disabled line to add this OpMode to the Driver Station OpMode list.
@@ -83,17 +73,14 @@ import static org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer.CameraDirection.BACK;
  * is explained below.
  */

-@TeleOp(name="ULTIMATEGOAL Vuforia Nav", group ="Concept")
+@TeleOp(name="Vuforia Field Nav", group ="Concept")
 @Disabled
-public class ConceptVuforiaUltimateGoalNavigation extends LinearOpMode {
+public class ConceptVuforiaFieldNavigation extends LinearOpMode {

     // IMPORTANT: For Phone Camera, set 1) the camera source and 2) the orientation, based on how your phone is mounted:
     // 1) Camera Source. Valid choices are: BACK (behind screen) or FRONT (selfie side)
     // 2) Phone Orientation. Choices are: PHONE_IS_PORTRAIT = true (portrait) or PHONE_IS_PORTRAIT = false (landscape)
     //
     // NOTE: If you are running on a CONTROL HUB, with only one USB WebCam, you must select CAMERA_CHOICE = BACK; and PHONE_IS_PORTRAIT = false;
     //

     private static final VuforiaLocalizer.CameraDirection CAMERA_CHOICE = BACK;
     private static final boolean PHONE_IS_PORTRAIT = false;

@@ -113,17 +100,18 @@ public class ConceptVuforiaUltimateGoalNavigation extends LinearOpMode {
             " -- YOUR NEW VUFORIA KEY GOES HERE --- ";

     // Since ImageTarget trackables use mm to specify their dimensions, we must use mm for all the physical dimensions.
-    // We will define some constants and conversions here
+    // We will define some constants and conversions here. These are useful for the Freight Frenzy field.
     private static final float mmPerInch        = 25.4f;
-    private static final float mmTargetHeight   = (6) * mmPerInch;   // the height of the center of the target image above the floor
-
-    // Constants for perimeter targets
-    private static final float halfField = 72 * mmPerInch;
-    private static final float quadField  = 36 * mmPerInch;
+    private static final float mmTargetHeight   = 6 * mmPerInch;     // the height of the center of the target image above the floor
+    private static final float halfField        = 72 * mmPerInch;
+    private static final float halfTile         = 12 * mmPerInch;
+    private static final float oneAndHalfTile   = 36 * mmPerInch;

     // Class Members
     private OpenGLMatrix lastLocation = null;
-    private VuforiaLocalizer vuforia = null;
+    private VuforiaLocalizer vuforia    = null;
+    private VuforiaTrackables targets   = null;

     private boolean targetVisible = false;
     private float phoneXRotate    = 0;
     private float phoneYRotate    = 0;
@@ -132,18 +120,17 @@ public class ConceptVuforiaUltimateGoalNavigation extends LinearOpMode {
     @Override public void runOpMode() {
         /*
          * Configure Vuforia by creating a Parameter object, and passing it to the Vuforia engine.
-         * We can pass Vuforia the handle to a camera preview resource (on the RC phone);
-         * If no camera monitor is desired, use the parameter-less constructor instead (commented out below).
+         * To get an on-phone camera preview, use the code below.
+         * If no camera preview is desired, use the parameter-less constructor instead (commented out below).
          */
         int cameraMonitorViewId = hardwareMap.appContext.getResources().getIdentifier("cameraMonitorViewId", "id", hardwareMap.appContext.getPackageName());
         VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(cameraMonitorViewId);
         // VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters();

         parameters.vuforiaLicenseKey = VUFORIA_KEY;
         parameters.cameraDirection   = CAMERA_CHOICE;

-        // Make sure extended tracking is disabled for this example.
+        // Turn off Extended tracking. Set this true if you want Vuforia to track beyond the target.
         parameters.useExtendedTracking = false;

         // Instantiate the Vuforia engine
@@ -151,21 +138,11 @@ public class ConceptVuforiaUltimateGoalNavigation extends LinearOpMode {

         // Load the data sets for the trackable objects. These particular data
         // sets are stored in the 'assets' part of our application.
-        VuforiaTrackables targetsUltimateGoal = this.vuforia.loadTrackablesFromAsset("UltimateGoal");
-        VuforiaTrackable blueTowerGoalTarget = targetsUltimateGoal.get(0);
-        blueTowerGoalTarget.setName("Blue Tower Goal Target");
-        VuforiaTrackable redTowerGoalTarget = targetsUltimateGoal.get(1);
-        redTowerGoalTarget.setName("Red Tower Goal Target");
-        VuforiaTrackable redAllianceTarget = targetsUltimateGoal.get(2);
-        redAllianceTarget.setName("Red Alliance Target");
-        VuforiaTrackable blueAllianceTarget = targetsUltimateGoal.get(3);
-        blueAllianceTarget.setName("Blue Alliance Target");
-        VuforiaTrackable frontWallTarget = targetsUltimateGoal.get(4);
-        frontWallTarget.setName("Front Wall Target");
+        targets = this.vuforia.loadTrackablesFromAsset("FreightFrenzy");

         // For convenience, gather together all the trackable objects in one easily-iterable collection
         List<VuforiaTrackable> allTrackables = new ArrayList<VuforiaTrackable>();
-        allTrackables.addAll(targetsUltimateGoal);
+        allTrackables.addAll(targets);

        /**
         * In order for localization to work, we need to tell the system where each target is on the field, and
@@ -185,41 +162,28 @@ public class ConceptVuforiaUltimateGoalNavigation extends LinearOpMode {
         * coordinate system (the center of the field), facing up.
         */

-        //Set the position of the perimeter targets with relation to origin (center of field)
-        redAllianceTarget.setLocation(OpenGLMatrix
-                .translation(0, -halfField, mmTargetHeight)
-                .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, 180)));
+        // Name and locate each trackable object
+        identifyTarget(0, "Blue Storage",       -halfField,  oneAndHalfTile, mmTargetHeight, 90, 0,  90);
+        identifyTarget(1, "Blue Alliance Wall",  halfTile,   halfField,      mmTargetHeight, 90, 0,   0);
+        identifyTarget(2, "Red Storage",        -halfField, -oneAndHalfTile, mmTargetHeight, 90, 0,  90);
+        identifyTarget(3, "Red Alliance Wall",   halfTile,  -halfField,      mmTargetHeight, 90, 0, 180);

-        blueAllianceTarget.setLocation(OpenGLMatrix
-                .translation(0, halfField, mmTargetHeight)
-                .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, 0)));
-        frontWallTarget.setLocation(OpenGLMatrix
-                .translation(-halfField, 0, mmTargetHeight)
-                .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, 90)));
+        /*
+         * Create a transformation matrix describing where the phone is on the robot.
+         *
+         * NOTE !!!! It's very important that you turn OFF your phone's Auto-Screen-Rotation option.
+         * Lock it into Portrait for these numbers to work.
+         *
+         * Info: The coordinate frame for the robot looks the same as the field.
+         * The robot's "forward" direction is facing out along X axis, with the LEFT side facing out along the Y axis.
+         * Z is UP on the robot. This equates to a heading angle of Zero degrees.
+         *
+         * The phone starts out lying flat, with the screen facing Up and with the physical top of the phone
+         * pointing to the LEFT side of the Robot.
+         * The two examples below assume that the camera is facing forward out the front of the robot.
+         */

-        // The tower goal targets are located a quarter field length from the ends of the back perimeter wall.
-        blueTowerGoalTarget.setLocation(OpenGLMatrix
-                .translation(halfField, quadField, mmTargetHeight)
-                .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, -90)));
-        redTowerGoalTarget.setLocation(OpenGLMatrix
-                .translation(halfField, -quadField, mmTargetHeight)
-                .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, -90)));
-
-        //
-        // Create a transformation matrix describing where the phone is on the robot.
-        //
-        // NOTE !!!! It's very important that you turn OFF your phone's Auto-Screen-Rotation option.
-        // Lock it into Portrait for these numbers to work.
-        //
-        // Info: The coordinate frame for the robot looks the same as the field.
-        // The robot's "forward" direction is facing out along X axis, with the LEFT side facing out along the Y axis.
-        // Z is UP on the robot. This equates to a bearing angle of Zero degrees.
-        //
-        // The phone starts out lying flat, with the screen facing Up and with the physical top of the phone
-        // pointing to the LEFT side of the Robot.
-        // The two examples below assume that the camera is facing forward out the front of the robot.
-
-        // We need to rotate the camera around it's long axis to bring the correct camera forward.
+        // We need to rotate the camera around its long axis to bring the correct camera forward.
         if (CAMERA_CHOICE == BACK) {
             phoneYRotate = -90;
         } else {
@@ -232,10 +196,10 @@ public class ConceptVuforiaUltimateGoalNavigation extends LinearOpMode {
         }

         // Next, translate the camera lens to where it is on the robot.
-        // In this example, it is centered (left to right), but forward of the middle of the robot, and above ground level.
-        final float CAMERA_FORWARD_DISPLACEMENT  = 4.0f * mmPerInch;   // eg: Camera is 4 Inches in front of robot center
-        final float CAMERA_VERTICAL_DISPLACEMENT = 8.0f * mmPerInch;   // eg: Camera is 8 Inches above ground
-        final float CAMERA_LEFT_DISPLACEMENT     = 0;                  // eg: Camera is ON the robot's center line
+        // In this example, it is centered on the robot (left-to-right and front-to-back), and 6 inches above ground level.
+        final float CAMERA_FORWARD_DISPLACEMENT  = 0.0f * mmPerInch;   // eg: Enter the forward distance from the center of the robot to the camera lens
+        final float CAMERA_VERTICAL_DISPLACEMENT = 6.0f * mmPerInch;   // eg: Camera is 6 Inches above ground
+        final float CAMERA_LEFT_DISPLACEMENT     = 0.0f * mmPerInch;   // eg: Enter the left distance from the center of the robot to the camera lens

         OpenGLMatrix robotFromCamera = OpenGLMatrix
                 .translation(CAMERA_FORWARD_DISPLACEMENT, CAMERA_LEFT_DISPLACEMENT, CAMERA_VERTICAL_DISPLACEMENT)
@@ -246,19 +210,24 @@ public class ConceptVuforiaUltimateGoalNavigation extends LinearOpMode {
             ((VuforiaTrackableDefaultListener) trackable.getListener()).setPhoneInformation(robotFromCamera, parameters.cameraDirection);
         }

-        // WARNING:
-        // In this sample, we do not wait for PLAY to be pressed. Target Tracking is started immediately when INIT is pressed.
-        // This sequence is used to enable the new remote DS Camera Preview feature to be used with this sample.
-        // CONSEQUENTLY do not put any driving commands in this loop.
-        // To restore the normal opmode structure, just un-comment the following line:
+        /*
+         * WARNING:
+         * In this sample, we do not wait for PLAY to be pressed. Target Tracking is started immediately when INIT is pressed.
+         * This sequence is used to enable the new remote DS Camera Stream feature to be used with this sample.
+         * CONSEQUENTLY do not put any driving commands in this loop.
+         * To restore the normal opmode structure, just un-comment the following line:
+         */

         // waitForStart();

-        // Note: To use the remote camera preview:
-        // AFTER you hit Init on the Driver Station, use the "options menu" to select "Camera Stream"
-        // Tap the preview window to receive a fresh image.
+        /* Note: To use the remote camera preview:
+         * AFTER you hit Init on the Driver Station, use the "options menu" to select "Camera Stream"
+         * Tap the preview window to receive a fresh image.
+         * It is not permitted to transition to RUN while the camera preview window is active.
+         * Either press STOP to exit the OpMode, or use the "options menu" again, and select "Camera Stream" to close the preview window.
+         */

-        targetsUltimateGoal.activate();
+        targets.activate();
         while (!isStopRequested()) {

             // check all the trackable targets to see which one (if any) is visible.
@@ -282,7 +251,7 @@ public class ConceptVuforiaUltimateGoalNavigation extends LinearOpMode {
             if (targetVisible) {
                 // express position (translation) of robot in inches.
                 VectorF translation = lastLocation.getTranslation();
-                telemetry.addData("Pos (in)", "{X, Y, Z} = %.1f, %.1f, %.1f",
+                telemetry.addData("Pos (inches)", "{X, Y, Z} = %.1f, %.1f, %.1f",
                         translation.get(0) / mmPerInch, translation.get(1) / mmPerInch, translation.get(2) / mmPerInch);

                 // express the rotation of the robot in degrees.
@@ -296,6 +265,20 @@ public class ConceptVuforiaUltimateGoalNavigation extends LinearOpMode {
         }

         // Disable Tracking when we are done;
-        targetsUltimateGoal.deactivate();
+        targets.deactivate();
     }

+    /***
+     * Identify a target by naming it, and setting its position and orientation on the field
+     * @param targetIndex
+     * @param targetName
+     * @param dx, dy, dz  Target offsets in x,y,z axes
+     * @param rx, ry, rz  Target rotations in x,y,z axes
+     */
+    void identifyTarget(int targetIndex, String targetName, float dx, float dy, float dz, float rx, float ry, float rz) {
+        VuforiaTrackable aTarget = targets.get(targetIndex);
+        aTarget.setName(targetName);
+        aTarget.setLocation(OpenGLMatrix.translation(dx, dy, dz)
+                .multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, rx, ry, rz)));
+    }
 }
@ -52,29 +52,21 @@ import static org.firstinspires.ftc.robotcore.external.navigation.AxesOrder.XZY;
|
||||
import static org.firstinspires.ftc.robotcore.external.navigation.AxesReference.EXTRINSIC;
|
||||
|
||||
/**
|
||||
* This 2020-2021 OpMode illustrates the basics of using the Vuforia localizer to determine
|
||||
* positioning and orientation of robot on the ULTIMATE GOAL FTC field.
|
||||
* The code is structured as a LinearOpMode
|
||||
* This OpMode illustrates using the Vuforia localizer to determine positioning and orientation of
|
||||
* robot on the FTC field using a WEBCAM. The code is structured as a LinearOpMode
|
||||
*
|
||||
* NOTE: If you are running on a Phone with a built-in camera, use the ConceptVuforiaFieldNavigation example instead of this one.
|
||||
* NOTE: It is possible to switch between multiple WebCams (eg: one for the left side and one for the right).
|
||||
* For a related example of how to do this, see ConceptTensorFlowObjectDetectionSwitchableCameras
|
||||
*
|
||||
* When images are located, Vuforia is able to determine the position and orientation of the
|
||||
* image relative to the camera. This sample code then combines that information with a
|
||||
* knowledge of where the target images are on the field, to determine the location of the camera.
|
||||
*
|
||||
* From the Audience perspective, the Red Alliance station is on the right and the
|
||||
* Blue Alliance Station is on the left.
|
||||
|
||||
* There are a total of five image targets for the ULTIMATE GOAL game.
|
||||
* Three of the targets are placed in the center of the Red Alliance, Audience (Front),
|
||||
* and Blue Alliance perimeter walls.
|
||||
* Two additional targets are placed on the perimeter wall, one in front of each Tower Goal.
|
||||
* Refer to the Field Setup manual for more specific location details
|
||||
*
|
||||
* A final calculation then uses the location of the camera on the robot to determine the
|
||||
* Finally, the location of the camera on the robot is used to determine the
|
||||
* robot's location and orientation on the field.
|
||||
*
|
||||
* @see VuforiaLocalizer
|
||||
* @see VuforiaTrackableDefaultListener
|
||||
* see ultimategoal/doc/tutorial/FTC_FieldCoordinateSystemDefinition.pdf
|
||||
* To learn more about the FTC field coordinate model, see FTC_FieldCoordinateSystemDefinition.pdf in this folder
|
||||
*
|
||||
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
|
||||
* Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list.
|
||||
@ -83,10 +75,9 @@ import static org.firstinspires.ftc.robotcore.external.navigation.AxesReference.
|
||||
* is explained below.
|
||||
*/
|
||||
|
||||
|
||||
@TeleOp(name="ULTIMATEGOAL Vuforia Nav Webcam", group ="Concept")
|
||||
@TeleOp(name="Vuforia Field Nav Webcam", group ="Concept")
|
||||
@Disabled
|
||||
public class ConceptVuforiaUltimateGoalNavigationWebcam extends LinearOpMode {
|
||||
public class ConceptVuforiaFieldNavigationWebcam extends LinearOpMode {
|
||||
|
||||
/*
|
||||
* IMPORTANT: You need to obtain your own license key to use Vuforia. The string below with which
|
||||
@ -106,52 +97,39 @@ public class ConceptVuforiaUltimateGoalNavigationWebcam extends LinearOpMode {
|
||||
// Since ImageTarget trackables use mm to specifiy their dimensions, we must use mm for all the physical dimension.
|
||||
// We will define some constants and conversions here
|
||||
private static final float mmPerInch = 25.4f;
|
||||
private static final float mmTargetHeight = (6) * mmPerInch; // the height of the center of the target image above the floor
|
||||
|
||||
// Constants for perimeter targets
|
||||
private static final float halfField = 72 * mmPerInch;
|
||||
private static final float quadField = 36 * mmPerInch;
|
||||
private static final float mmTargetHeight = 6 * mmPerInch; // the height of the center of the target image above the floor
|
||||
private static final float halfField = 72 * mmPerInch;
|
||||
private static final float halfTile = 12 * mmPerInch;
|
||||
private static final float oneAndHalfTile = 36 * mmPerInch;

// Class Members
private OpenGLMatrix lastLocation = null;
private VuforiaLocalizer vuforia = null;
private OpenGLMatrix lastLocation = null;
private VuforiaLocalizer vuforia = null;
private VuforiaTrackables targets = null;
private WebcamName webcamName = null;

/**
* This is the webcam we are to use. As with other hardware devices such as motors and
* servos, this device is identified using the robot configuration tool in the FTC application.
*/
WebcamName webcamName = null;

private boolean targetVisible = false;
private float phoneXRotate = 0;
private float phoneYRotate = 0;
private float phoneZRotate = 0;
private boolean targetVisible = false;

@Override public void runOpMode() {
/*
* Retrieve the camera we are to use.
*/
// Connect to the camera we are to use. This name must match what is set up in Robot Configuration
webcamName = hardwareMap.get(WebcamName.class, "Webcam 1");

/*
* Configure Vuforia by creating a Parameters object, and passing it to the Vuforia engine.
* We can pass Vuforia the handle to a camera preview resource (on the RC screen);
* If no camera monitor is desired, use the parameter-less constructor instead (commented out below).
* If no camera-preview is desired, use the parameter-less constructor instead (commented out below).
* Note: A preview window is required if you want to view the camera stream on the Driver Station Phone.
*/
int cameraMonitorViewId = hardwareMap.appContext.getResources().getIdentifier("cameraMonitorViewId", "id", hardwareMap.appContext.getPackageName());
VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(cameraMonitorViewId);

// VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters();

parameters.vuforiaLicenseKey = VUFORIA_KEY;

/**
* We also indicate which camera on the RC we wish to use.
*/
// We also indicate which camera we wish to use.
parameters.cameraName = webcamName;

// Make sure extended tracking is disabled for this example.
// Turn off Extended tracking. Set this true if you want Vuforia to track beyond the target.
parameters.useExtendedTracking = false;

// Instantiate the Vuforia engine
@ -159,21 +137,11 @@ public class ConceptVuforiaUltimateGoalNavigationWebcam extends LinearOpMode {

// Load the data sets for the trackable objects. These particular data
// sets are stored in the 'assets' part of our application.
VuforiaTrackables targetsUltimateGoal = this.vuforia.loadTrackablesFromAsset("UltimateGoal");
VuforiaTrackable blueTowerGoalTarget = targetsUltimateGoal.get(0);
blueTowerGoalTarget.setName("Blue Tower Goal Target");
VuforiaTrackable redTowerGoalTarget = targetsUltimateGoal.get(1);
redTowerGoalTarget.setName("Red Tower Goal Target");
VuforiaTrackable redAllianceTarget = targetsUltimateGoal.get(2);
redAllianceTarget.setName("Red Alliance Target");
VuforiaTrackable blueAllianceTarget = targetsUltimateGoal.get(3);
blueAllianceTarget.setName("Blue Alliance Target");
VuforiaTrackable frontWallTarget = targetsUltimateGoal.get(4);
frontWallTarget.setName("Front Wall Target");
targets = this.vuforia.loadTrackablesFromAsset("FreightFrenzy");

// For convenience, gather together all the trackable objects in one easily-iterable collection
List<VuforiaTrackable> allTrackables = new ArrayList<VuforiaTrackable>();
allTrackables.addAll(targetsUltimateGoal);
allTrackables.addAll(targets);

/**
* In order for localization to work, we need to tell the system where each target is on the field, and
@ -193,71 +161,63 @@ public class ConceptVuforiaUltimateGoalNavigationWebcam extends LinearOpMode {
* coordinate system (the center of the field), facing up.
*/

// Set the position of the perimeter targets in relation to the origin (center of field)
redAllianceTarget.setLocation(OpenGLMatrix
.translation(0, -halfField, mmTargetHeight)
.multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, 180)));
// Name and locate each trackable object
identifyTarget(0, "Blue Storage", -halfField, oneAndHalfTile, mmTargetHeight, 90, 0, 90);
identifyTarget(1, "Blue Alliance Wall", halfTile, halfField, mmTargetHeight, 90, 0, 0);
identifyTarget(2, "Red Storage", -halfField, -oneAndHalfTile, mmTargetHeight, 90, 0, 90);
identifyTarget(3, "Red Alliance Wall", halfTile, -halfField, mmTargetHeight, 90, 0, 180);

blueAllianceTarget.setLocation(OpenGLMatrix
.translation(0, halfField, mmTargetHeight)
.multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, 0)));
frontWallTarget.setLocation(OpenGLMatrix
.translation(-halfField, 0, mmTargetHeight)
.multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, 90)));
/*
* Create a transformation matrix describing where the camera is on the robot.
*
* Info: The coordinate frame for the robot looks the same as the field.
* The robot's "forward" direction is facing out along X axis, with the LEFT side facing out along the Y axis.
* Z is UP on the robot. This equates to a bearing angle of Zero degrees.
*
* For a WebCam, the default starting orientation of the camera is looking UP (pointing in the Z direction),
* with the wide (horizontal) axis of the camera aligned with the X axis, and
* the Narrow (vertical) axis of the camera aligned with the Y axis
*
* But, this example assumes that the camera is actually facing forward out the front of the robot.
* So, the "default" camera position requires two rotations to get it oriented correctly.
* 1) First it must be rotated +90 degrees around the X axis to get it horizontal (it's now facing out the right side of the robot)
* 2) Next it must be rotated +90 degrees (counter-clockwise) around the Z axis to face forward.
*
* Finally the camera can be translated to its actual mounting position on the robot.
* In this example, it is centered on the robot (left-to-right and front-to-back), and 6 inches above ground level.
*/

// The tower goal targets are located a quarter field length from the ends of the back perimeter wall.
blueTowerGoalTarget.setLocation(OpenGLMatrix
.translation(halfField, quadField, mmTargetHeight)
.multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, -90)));
redTowerGoalTarget.setLocation(OpenGLMatrix
.translation(halfField, -quadField, mmTargetHeight)
.multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, 90, 0, -90)));

//
// Create a transformation matrix describing where the phone is on the robot.
//
// Info: The coordinate frame for the robot looks the same as the field.
// The robot's "forward" direction is facing out along X axis, with the LEFT side facing out along the Y axis.
// Z is UP on the robot. This equates to a bearing angle of Zero degrees.
//
// For a WebCam, the default starting orientation of the camera is looking UP (pointing in the Z direction),
// with the wide (horizontal) axis of the camera aligned with the X axis, and
// the Narrow (vertical) axis of the camera aligned with the Y axis
//
// But, this example assumes that the camera is actually facing forward out the front of the robot.
// So, the "default" camera position requires two rotations to get it oriented correctly.
// 1) First it must be rotated +90 degrees around the X axis to get it horizontal (it's now facing out the right side of the robot)
// 2) Next it must be rotated +90 degrees (counter-clockwise) around the Z axis to face forward.
//
// Finally the camera can be translated to its actual mounting position on the robot.
// In this example, it is centered (left to right), but 4" forward of the middle of the robot, and 8" above ground level.

final float CAMERA_FORWARD_DISPLACEMENT = 4.0f * mmPerInch; // eg: Camera is 4 Inches in front of robot-center
final float CAMERA_VERTICAL_DISPLACEMENT = 8.0f * mmPerInch; // eg: Camera is 8 Inches above ground
final float CAMERA_LEFT_DISPLACEMENT = 0; // eg: Camera is ON the robot's center line
final float CAMERA_FORWARD_DISPLACEMENT = 0.0f * mmPerInch; // eg: Enter the forward distance from the center of the robot to the camera lens
final float CAMERA_VERTICAL_DISPLACEMENT = 6.0f * mmPerInch; // eg: Camera is 6 Inches above ground
final float CAMERA_LEFT_DISPLACEMENT = 0.0f * mmPerInch; // eg: Enter the left distance from the center of the robot to the camera lens

OpenGLMatrix cameraLocationOnRobot = OpenGLMatrix
.translation(CAMERA_FORWARD_DISPLACEMENT, CAMERA_LEFT_DISPLACEMENT, CAMERA_VERTICAL_DISPLACEMENT)
.multiplied(Orientation.getRotationMatrix(EXTRINSIC, XZY, DEGREES, 90, 90, 0));
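// Tip: to sanity-check the transform above, you can format it for telemetry during INIT,
// e.g. with an optional debug line such as:
//     telemetry.addData("Camera on robot", cameraLocationOnRobot.formatAsTransform());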

/** Let all the trackable listeners know where the phone is. */
/** Let all the trackable listeners know where the camera is. */
for (VuforiaTrackable trackable : allTrackables) {
((VuforiaTrackableDefaultListener) trackable.getListener()).setCameraLocationOnRobot(parameters.cameraName, cameraLocationOnRobot);
}

// WARNING:
// In this sample, we do not wait for PLAY to be pressed. Target Tracking is started immediately when INIT is pressed.
// This sequence is used to enable the new remote DS Camera Preview feature to be used with this sample.
// CONSEQUENTLY do not put any driving commands in this loop.
// To restore the normal opmode structure, just un-comment the following line:
/*
* WARNING:
* In this sample, we do not wait for PLAY to be pressed. Target Tracking is started immediately when INIT is pressed.
* This sequence is used to enable the new remote DS Camera Preview feature to be used with this sample.
* CONSEQUENTLY do not put any driving commands in this loop.
* To restore the normal opmode structure, just un-comment the following line:
*/

// waitForStart();

// Note: To use the remote camera preview:
// AFTER you hit Init on the Driver Station, use the "options menu" to select "Camera Stream"
// Tap the preview window to receive a fresh image.
/* Note: To use the remote camera preview:
* AFTER you hit Init on the Driver Station, use the "options menu" to select "Camera Stream"
* Tap the preview window to receive a fresh image.
* It is not permitted to transition to RUN while the camera preview window is active.
* Either press STOP to exit the OpMode, or use the "options menu" again, and select "Camera Stream" to close the preview window.
*/

targetsUltimateGoal.activate();
targets.activate();
while (!isStopRequested()) {

// Check all the trackable targets to see which one (if any) is visible.
@ -281,7 +241,7 @@ public class ConceptVuforiaUltimateGoalNavigationWebcam extends LinearOpMode {
if (targetVisible) {
// express position (translation) of robot in inches.
VectorF translation = lastLocation.getTranslation();
telemetry.addData("Pos (in)", "{X, Y, Z} = %.1f, %.1f, %.1f",
telemetry.addData("Pos (inches)", "{X, Y, Z} = %.1f, %.1f, %.1f",
translation.get(0) / mmPerInch, translation.get(1) / mmPerInch, translation.get(2) / mmPerInch);
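// lastLocation is expressed in mm, so dividing each component by mmPerInch (25.4) converts it to inches for display.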

// express the rotation of the robot in degrees.
@ -295,6 +255,20 @@ public class ConceptVuforiaUltimateGoalNavigationWebcam extends LinearOpMode {
}

// Disable Tracking when we are done.
targetsUltimateGoal.deactivate();
targets.deactivate();
}

/**
* Identify a target by naming it, and setting its position and orientation on the field
* @param targetIndex the index of the target within the trackables collection loaded above
* @param targetName the name to assign to the target
* @param dx, dy, dz Target offsets in x,y,z axes
* @param rx, ry, rz Target rotations in x,y,z axes
*/
void identifyTarget(int targetIndex, String targetName, float dx, float dy, float dz, float rx, float ry, float rz) {
VuforiaTrackable aTarget = targets.get(targetIndex);
aTarget.setName(targetName);
aTarget.setLocation(OpenGLMatrix.translation(dx, dy, dz)
.multiplied(Orientation.getRotationMatrix(EXTRINSIC, XYZ, DEGREES, rx, ry, rz)));
}
}
@ -1,336 +0,0 @@

/* Copyright (c) 2017 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

package org.firstinspires.ftc.robotcontroller.external.samples;

import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.util.RobotLog;

import org.firstinspires.ftc.robotcore.external.ClassFactory;
import org.firstinspires.ftc.robotcore.external.matrices.MatrixF;
import org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.robotcore.external.navigation.AxesOrder;
import org.firstinspires.ftc.robotcore.external.navigation.AxesReference;
import org.firstinspires.ftc.robotcore.external.navigation.Orientation;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackable;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackableDefaultListener;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackables;

import java.util.ArrayList;
import java.util.List;

/**
* This 2016-2017 OpMode illustrates the basics of using the Vuforia localizer to determine
* positioning and orientation of the robot on the FTC field.
* The code is structured as a LinearOpMode
*
* Vuforia uses the phone's camera to inspect its surroundings, and attempts to locate target images.
*
* When images are located, Vuforia is able to determine the position and orientation of the
* image relative to the camera. This sample code then combines that information with
* knowledge of where the target images are on the field, to determine the location of the camera.
*
* This example assumes a "diamond" field configuration where the red and blue alliance stations
* are adjacent on the corner of the field furthest from the audience.
* From the Audience perspective, the Red driver station is on the right.
* The two vision targets are located on the two walls closest to the audience, facing in.
* The Stones are on the RED side of the field, and the Chips are on the Blue side.
*
* A final calculation then uses the location of the camera on the robot to determine the
* robot's location and orientation on the field.
*
* @see VuforiaLocalizer
* @see VuforiaTrackableDefaultListener
* see ftc_app/doc/tutorial/FTC_FieldCoordinateSystemDefinition.pdf
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list.
*
* IMPORTANT: In order to use this OpMode, you need to obtain your own Vuforia license key as
* is explained below.
*/

@TeleOp(name="Concept: Vuforia Navigation", group ="Concept")
|
||||
@Disabled
|
||||
public class ConceptVuforiaNavigation extends LinearOpMode {
|
||||
|
||||
public static final String TAG = "Vuforia Navigation Sample";
|
||||
|
||||
OpenGLMatrix lastLocation = null;
|
||||
|
||||
/**
|
||||
* {@link #vuforia} is the variable we will use to store our instance of the Vuforia
|
||||
* localization engine.
|
||||
*/
|
||||
VuforiaLocalizer vuforia;
|
||||
|
||||
@Override public void runOpMode() {
|
||||
/*
|
||||
* To start up Vuforia, tell it the view that we wish to use for camera monitor (on the RC phone);
|
||||
* If no camera monitor is desired, use the parameterless constructor instead (commented out below).
|
||||
*/
|
||||
int cameraMonitorViewId = hardwareMap.appContext.getResources().getIdentifier("cameraMonitorViewId", "id", hardwareMap.appContext.getPackageName());
|
||||
VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(cameraMonitorViewId);
|
||||
|
||||
// OR... Do Not Activate the Camera Monitor View, to save power
|
||||
// VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters();
|
||||
|
||||
/*
|
||||
* IMPORTANT: You need to obtain your own license key to use Vuforia. The string below with which
|
||||
* 'parameters.vuforiaLicenseKey' is initialized is for illustration only, and will not function.
|
||||
* A Vuforia 'Development' license key, can be obtained free of charge from the Vuforia developer
|
||||
* web site at https://developer.vuforia.com/license-manager.
|
||||
*
|
||||
* Vuforia license keys are always 380 characters long, and look as if they contain mostly
|
||||
* random data. As an example, here is a example of a fragment of a valid key:
|
||||
* ... yIgIzTqZ4mWjk9wd3cZO9T1axEqzuhxoGlfOOI2dRzKS4T0hQ8kT ...
|
||||
* Once you've obtained a license key, copy the string from the Vuforia web site
|
||||
* and paste it in to your code on the next line, between the double quotes.
|
||||
*/
|
||||
parameters.vuforiaLicenseKey = " -- YOUR NEW VUFORIA KEY GOES HERE --- ";
|
||||
|
||||
/*
|
||||
* We also indicate which camera on the RC that we wish to use.
|
||||
* Here we chose the back (HiRes) camera (for greater range), but
|
||||
* for a competition robot, the front camera might be more convenient.
|
||||
*/
|
||||
parameters.cameraDirection = VuforiaLocalizer.CameraDirection.BACK;
|
||||
|
||||
/**
|
||||
* Instantiate the Vuforia engine
|
||||
*/
|
||||
vuforia = ClassFactory.getInstance().createVuforia(parameters);
|
||||
|
||||
/**
|
||||
* Load the data sets that for the trackable objects we wish to track. These particular data
|
||||
* sets are stored in the 'assets' part of our application (you'll see them in the Android
|
||||
* Studio 'Project' view over there on the left of the screen). You can make your own datasets
|
||||
* with the Vuforia Target Manager: https://developer.vuforia.com/target-manager. PDFs for the
|
||||
* example "StonesAndChips", datasets can be found in in this project in the
|
||||
* documentation directory.
|
||||
*/
|
||||
VuforiaTrackables stonesAndChips = this.vuforia.loadTrackablesFromAsset("StonesAndChips");
|
||||
VuforiaTrackable redTarget = stonesAndChips.get(0);
|
||||
redTarget.setName("RedTarget"); // Stones
|
||||
|
||||
VuforiaTrackable blueTarget = stonesAndChips.get(1);
|
||||
blueTarget.setName("BlueTarget"); // Chips
|
||||
|
||||
/** For convenience, gather together all the trackable objects in one easily-iterable collection */
|
||||
List<VuforiaTrackable> allTrackables = new ArrayList<VuforiaTrackable>();
|
||||
allTrackables.addAll(stonesAndChips);
|
||||
|
||||
/**
|
||||
* We use units of mm here because that's the recommended units of measurement for the
|
||||
* size values specified in the XML for the ImageTarget trackables in data sets. E.g.:
|
||||
* <ImageTarget name="stones" size="247 173"/>
|
||||
* You don't *have to* use mm here, but the units here and the units used in the XML
|
||||
* target configuration files *must* correspond for the math to work out correctly.
|
||||
*/
|
||||
float mmPerInch = 25.4f;
|
||||
float mmBotWidth = 18 * mmPerInch; // ... or whatever is right for your robot
|
||||
float mmFTCFieldWidth = (12*12 - 2) * mmPerInch; // the FTC field is ~11'10" center-to-center of the glass panels
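// Worked arithmetic for the field-width constant above:
// mmFTCFieldWidth = (144 - 2) inches * 25.4 mm/inch = 142 * 25.4 = 3606.8 mm (~11 ft 10 in)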

/**
* In order for localization to work, we need to tell the system where each target we
* wish to use for navigation resides on the field, and we need to specify where on the robot
* the phone resides. These specifications are in the form of <em>transformation matrices.</em>
* Transformation matrices are a central, important concept in the math here involved in localization.
* See <a href="https://en.wikipedia.org/wiki/Transformation_matrix">Transformation Matrix</a>
* for detailed information. Commonly, you'll encounter transformation matrices as instances
* of the {@link OpenGLMatrix} class.
*
* For the most part, you don't need to understand the details of the math of how transformation
* matrices work inside (as fascinating as that is, truly). Just remember these key points:
* <ol>
*
* <li>You can put two transformations together to produce a third that combines the effect of
* both of them. If, for example, you have a rotation transform R and a translation transform T,
* then the combined transformation matrix RT which does the rotation first and then the translation
* is given by {@code RT = T.multiplied(R)}. That is, the transforms are multiplied in the
* <em>reverse</em> of the chronological order in which they are applied.</li>
*
* <li>A common way to create useful transforms is to use methods in the {@link OpenGLMatrix}
* class and the Orientation class. See, for example, {@link OpenGLMatrix#translation(float,
* float, float)}, {@link OpenGLMatrix#rotation(AngleUnit, float, float, float, float)}, and
* {@link Orientation#getRotationMatrix(AxesReference, AxesOrder, AngleUnit, float, float, float)}.
* Related methods in {@link OpenGLMatrix}, such as {@link OpenGLMatrix#rotated(AngleUnit,
* float, float, float, float)}, are syntactic shorthands for creating a new transform and
* then immediately multiplying the receiver by it, which can be convenient at times.</li>
*
* <li>If you want to break open the black box of a transformation matrix to understand
* what it's doing inside, use {@link MatrixF#getTranslation()} to fetch how much the
* transform will move you in x, y, and z, and use {@link Orientation#getOrientation(MatrixF,
* AxesReference, AxesOrder, AngleUnit)} to determine the rotational motion that the transform
* will impart. See {@link #format(OpenGLMatrix)} below for an example.</li>
*
* </ol>
*
* This example places the "stones" image on the perimeter wall to the Left
* of the Red Driver station wall. Similar to the Red Beacon Location on the Res-Q
*
* This example places the "chips" image on the perimeter wall to the Right
* of the Blue Driver station. Similar to the Blue Beacon Location on the Res-Q
*
* See the doc folder of this project for a description of the field Axis conventions.
*
* Initially the target is conceptually lying at the origin of the field's coordinate system
* (the center of the field), facing up.
*
* In this configuration, the target's coordinate system aligns with that of the field.
*
* In a real situation we'd also account for the vertical (Z) offset of the target,
* but for simplicity, we ignore that here; for a real robot, you'll want to fix that.
*
* To place the Stones Target on the Red Audience wall:
* - First we rotate it 90 around the field's X axis to flip it upright
* - Then we rotate it 90 around the field's Z axis to face it away from the audience.
* - Finally, we translate it back along the X axis towards the red audience wall.
*/
OpenGLMatrix redTargetLocationOnField = OpenGLMatrix
/* Then we translate the target off to the RED WALL. Our translation here
is a negative translation in X.*/
.translation(-mmFTCFieldWidth/2, 0, 0)
.multiplied(Orientation.getRotationMatrix(
/* First, in the fixed (field) coordinate system, we rotate 90deg in X, then 90 in Z */
AxesReference.EXTRINSIC, AxesOrder.XZX,
AngleUnit.DEGREES, 90, 90, 0));
redTarget.setLocation(redTargetLocationOnField);
RobotLog.ii(TAG, "Red Target=%s", format(redTargetLocationOnField));

/*
* To place the Chips Target on the Blue Audience wall:
* - First we rotate it 90 around the field's X axis to flip it upright
* - Finally, we translate it along the Y axis towards the blue audience wall.
*/
OpenGLMatrix blueTargetLocationOnField = OpenGLMatrix
/* Then we translate the target off to the Blue Audience wall.
Our translation here is a positive translation in Y.*/
.translation(0, mmFTCFieldWidth/2, 0)
.multiplied(Orientation.getRotationMatrix(
/* First, in the fixed (field) coordinate system, we rotate 90deg in X */
AxesReference.EXTRINSIC, AxesOrder.XZX,
AngleUnit.DEGREES, 90, 0, 0));
blueTarget.setLocation(blueTargetLocationOnField);
RobotLog.ii(TAG, "Blue Target=%s", format(blueTargetLocationOnField));

/**
* Create a transformation matrix describing where the phone is on the robot. Here, we
* put the phone on the right hand side of the robot with the screen facing in (see our
* choice of BACK camera above) and in landscape mode. Starting from alignment between the
* robot's and phone's axes, this is a rotation of -90deg along the Y axis.
*
* When determining whether a rotation is positive or negative, consider yourself as looking
* down the (positive) axis of rotation from the positive towards the origin. Positive rotations
* are then CCW, and negative rotations CW. An example: consider looking down the positive Z
* axis towards the origin. A positive rotation about Z (ie: a rotation parallel to the X-Y
* plane) is then CCW, as one would normally expect from the usual classic 2D geometry.
*/
OpenGLMatrix phoneLocationOnRobot = OpenGLMatrix
.translation(mmBotWidth/2,0,0)
.multiplied(Orientation.getRotationMatrix(
AxesReference.EXTRINSIC, AxesOrder.YZY,
AngleUnit.DEGREES, -90, 0, 0));
RobotLog.ii(TAG, "phone=%s", format(phoneLocationOnRobot));

/**
* Let the trackable listeners we care about know where the phone is. We know that each
* listener is a {@link VuforiaTrackableDefaultListener} and so can safely cast because
* we have not ourselves installed a listener of a different type.
*/
((VuforiaTrackableDefaultListener)redTarget.getListener()).setPhoneInformation(phoneLocationOnRobot, parameters.cameraDirection);
((VuforiaTrackableDefaultListener)blueTarget.getListener()).setPhoneInformation(phoneLocationOnRobot, parameters.cameraDirection);

/**
* A brief tutorial: here's how all the math is going to work:
*
* C = phoneLocationOnRobot maps phone coords -> robot coords
* P = tracker.getPose() maps image target coords -> phone coords
* L = redTargetLocationOnField maps image target coords -> field coords
*
* So
*
* C.inverted() maps robot coords -> phone coords
* P.inverted() maps phone coords -> imageTarget coords
*
* Putting that all together,
*
* L x P.inverted() x C.inverted() maps robot coords to field coords.
*
* @see VuforiaTrackableDefaultListener#getRobotLocation()
*/
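// A minimal sketch of that composition, written out by hand. This is effectively what
// VuforiaTrackableDefaultListener.getRobotLocation() computes for you, so the block below is
// illustrative only; getPose() returns null whenever the target is not currently visible.
OpenGLMatrix C = phoneLocationOnRobot;      // maps phone coords  -> robot coords
OpenGLMatrix L = redTargetLocationOnField;  // maps target coords -> field coords
OpenGLMatrix P = ((VuforiaTrackableDefaultListener)redTarget.getListener()).getPose(); // target -> phone
if (P != null) {
OpenGLMatrix robotOnField = L.multiplied(P.inverted()).multiplied(C.inverted());
RobotLog.ii(TAG, "hand-computed robot=%s", format(robotOnField));
}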

/** Wait for the game to begin */
telemetry.addData(">", "Press Play to start tracking");
telemetry.update();
waitForStart();

/** Start tracking the data sets we care about. */
stonesAndChips.activate();

while (opModeIsActive()) {

for (VuforiaTrackable trackable : allTrackables) {
/**
* getUpdatedRobotLocation() will return null if no new information is available since
* the last time that call was made, or if the trackable is not currently visible.
* getRobotLocation() will return null if the trackable is not currently visible.
*/
telemetry.addData(trackable.getName(), ((VuforiaTrackableDefaultListener)trackable.getListener()).isVisible() ? "Visible" : "Not Visible");

OpenGLMatrix robotLocationTransform = ((VuforiaTrackableDefaultListener)trackable.getListener()).getUpdatedRobotLocation();
if (robotLocationTransform != null) {
lastLocation = robotLocationTransform;
}
}
/**
* Provide feedback as to where the robot was last located (if we know).
*/
if (lastLocation != null) {
// RobotLog.vv(TAG, "robot=%s", format(lastLocation));
telemetry.addData("Pos", format(lastLocation));
} else {
telemetry.addData("Pos", "Unknown");
}
telemetry.update();
}
}

/**
* A simple utility that extracts positioning information from a transformation matrix
* and formats it in a form palatable to a human being.
*/
String format(OpenGLMatrix transformationMatrix) {
return transformationMatrix.formatAsTransform();
}
}
@ -1,461 +0,0 @@

/* Copyright (c) 2017 FIRST. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted (subject to the limitations in the disclaimer below) provided that
* the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list
* of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice, this
* list of conditions and the following disclaimer in the documentation and/or
* other materials provided with the distribution.
*
* Neither the name of FIRST nor the names of its contributors may be used to endorse or
* promote products derived from this software without specific prior written permission.
*
* NO EXPRESS OR IMPLIED LICENSES TO ANY PARTY'S PATENT RIGHTS ARE GRANTED BY THIS
* LICENSE. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
* THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/

package org.firstinspires.ftc.robotcontroller.external.samples;

import android.graphics.Bitmap;

import com.qualcomm.robotcore.eventloop.opmode.TeleOp;
import com.qualcomm.robotcore.eventloop.opmode.Disabled;
import com.qualcomm.robotcore.eventloop.opmode.LinearOpMode;
import com.qualcomm.robotcore.util.RobotLog;
import com.qualcomm.robotcore.util.ThreadPool;
import com.vuforia.Frame;

import org.firstinspires.ftc.robotcore.external.ClassFactory;
import org.firstinspires.ftc.robotcore.external.function.Consumer;
import org.firstinspires.ftc.robotcore.external.function.Continuation;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.robotcore.external.matrices.MatrixF;
import org.firstinspires.ftc.robotcore.external.matrices.OpenGLMatrix;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
import org.firstinspires.ftc.robotcore.external.navigation.AxesOrder;
import org.firstinspires.ftc.robotcore.external.navigation.AxesReference;
import org.firstinspires.ftc.robotcore.external.navigation.Orientation;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaLocalizer;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackable;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackableDefaultListener;
import org.firstinspires.ftc.robotcore.external.navigation.VuforiaTrackables;
import org.firstinspires.ftc.robotcore.internal.system.AppUtil;

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;

/**
* This 2016-2017 OpMode illustrates the basics of using the Vuforia localizer to determine
* positioning and orientation of the robot on the FTC field.
* The code is structured as a LinearOpMode
*
* Vuforia uses the phone's camera to inspect its surroundings, and attempts to locate target images.
*
* When images are located, Vuforia is able to determine the position and orientation of the
* image relative to the camera. This sample code then combines that information with
* knowledge of where the target images are on the field, to determine the location of the camera.
*
* This example assumes a "diamond" field configuration where the red and blue alliance stations
* are adjacent on the corner of the field furthest from the audience.
* From the Audience perspective, the Red driver station is on the right.
* The two vision targets are located on the two walls closest to the audience, facing in.
* The Stones are on the RED side of the field, and the Chips are on the Blue side.
*
* A final calculation then uses the location of the camera on the robot to determine the
* robot's location and orientation on the field.
*
* @see VuforiaLocalizer
* @see VuforiaTrackableDefaultListener
* see ftc_app/doc/tutorial/FTC_FieldCoordinateSystemDefinition.pdf
*
* Use Android Studio to Copy this Class, and Paste it into your team's code folder with a new name.
* Remove or comment out the @Disabled line to add this opmode to the Driver Station OpMode list.
*
* IMPORTANT: In order to use this OpMode, you need to obtain your own Vuforia license key as
* is explained below.
*/

@TeleOp(name="Concept: Vuforia Nav Webcam", group ="Concept")
@Disabled
public class ConceptVuforiaNavigationWebcam extends LinearOpMode {

public static final String TAG = "Vuforia Navigation Sample";

OpenGLMatrix lastLocation = null;

/**
* @see #captureFrameToFile()
*/
int captureCounter = 0;
File captureDirectory = AppUtil.ROBOT_DATA_DIR;

/**
* {@link #vuforia} is the variable we will use to store our instance of the Vuforia
* localization engine.
*/
VuforiaLocalizer vuforia;

/**
* This is the webcam we are to use. As with other hardware devices such as motors and
* servos, this device is identified using the robot configuration tool in the FTC application.
*/
WebcamName webcamName;

@Override public void runOpMode() {

/*
* Retrieve the camera we are to use.
*/
webcamName = hardwareMap.get(WebcamName.class, "Webcam 1");

/*
* To start up Vuforia, tell it the view that we wish to use for camera monitor (on the RC phone);
* If no camera monitor is desired, use the parameterless constructor instead (commented out below).
*/
int cameraMonitorViewId = hardwareMap.appContext.getResources().getIdentifier("cameraMonitorViewId", "id", hardwareMap.appContext.getPackageName());
VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters(cameraMonitorViewId);

// OR... Do Not Activate the Camera Monitor View, to save power
// VuforiaLocalizer.Parameters parameters = new VuforiaLocalizer.Parameters();

/*
* IMPORTANT: You need to obtain your own license key to use Vuforia. The string below with which
* 'parameters.vuforiaLicenseKey' is initialized is for illustration only, and will not function.
* A Vuforia 'Development' license key can be obtained free of charge from the Vuforia developer
* web site at https://developer.vuforia.com/license-manager.
*
* Vuforia license keys are always 380 characters long, and look as if they contain mostly
* random data. As an example, here is an example of a fragment of a valid key:
* ... yIgIzTqZ4mWjk9wd3cZO9T1axEqzuhxoGlfOOI2dRzKS4T0hQ8kT ...
* Once you've obtained a license key, copy the string from the Vuforia web site
* and paste it in to your code on the next line, between the double quotes.
*/
parameters.vuforiaLicenseKey = " -- YOUR NEW VUFORIA KEY GOES HERE --- ";

/**
* We also indicate which camera on the RC we wish to use.
*/
parameters.cameraName = webcamName;

/**
* Instantiate the Vuforia engine
*/
vuforia = ClassFactory.getInstance().createVuforia(parameters);

/**
* Because this opmode processes frames in order to write them to a file, we tell Vuforia
* that we want to ensure that certain frame formats are available in the {@link Frame}s we
* see.
*/
vuforia.enableConvertFrameToBitmap();
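// This call must precede any use of convertFrameToBitmap() (see captureFrameToFile() below);
// per the javadoc above, it asks Vuforia to include frame formats that can be converted to an Android Bitmap.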

/** @see #captureFrameToFile() */
AppUtil.getInstance().ensureDirectoryExists(captureDirectory);

/**
* Load the data sets for the trackable objects we wish to track. These particular data
* sets are stored in the 'assets' part of our application (you'll see them in the Android
* Studio 'Project' view over there on the left of the screen). You can make your own datasets
* with the Vuforia Target Manager: https://developer.vuforia.com/target-manager. PDFs for the
* example "StonesAndChips" datasets can be found in this project in the
* documentation directory.
*/
VuforiaTrackables stonesAndChips = vuforia.loadTrackablesFromAsset("StonesAndChips");
VuforiaTrackable redTarget = stonesAndChips.get(0);
redTarget.setName("RedTarget"); // Stones

VuforiaTrackable blueTarget = stonesAndChips.get(1);
blueTarget.setName("BlueTarget"); // Chips

/** For convenience, gather together all the trackable objects in one easily-iterable collection */
List<VuforiaTrackable> allTrackables = new ArrayList<VuforiaTrackable>();
allTrackables.addAll(stonesAndChips);

/**
* We use units of mm here because that's the recommended units of measurement for the
* size values specified in the XML for the ImageTarget trackables in data sets. E.g.:
* <ImageTarget name="stones" size="247 173"/>
* You don't *have to* use mm here, but the units here and the units used in the XML
* target configuration files *must* correspond for the math to work out correctly.
*/
float mmPerInch = 25.4f;
float mmBotWidth = 18 * mmPerInch; // ... or whatever is right for your robot
float mmFTCFieldWidth = (12*12 - 2) * mmPerInch; // the FTC field is ~11'10" center-to-center of the glass panels

/**
* In order for localization to work, we need to tell the system where each target we
* wish to use for navigation resides on the field, and we need to specify where on the robot
* the camera resides. These specifications are in the form of <em>transformation matrices.</em>
* Transformation matrices are a central, important concept in the math here involved in localization.
* See <a href="https://en.wikipedia.org/wiki/Transformation_matrix">Transformation Matrix</a>
* for detailed information. Commonly, you'll encounter transformation matrices as instances
* of the {@link OpenGLMatrix} class.
*
* For the most part, you don't need to understand the details of the math of how transformation
* matrices work inside (as fascinating as that is, truly). Just remember these key points:
* <ol>
*
* <li>You can put two transformations together to produce a third that combines the effect of
* both of them. If, for example, you have a rotation transform R and a translation transform T,
* then the combined transformation matrix RT which does the rotation first and then the translation
* is given by {@code RT = T.multiplied(R)}. That is, the transforms are multiplied in the
* <em>reverse</em> of the chronological order in which they are applied.</li>
*
* <li>A common way to create useful transforms is to use methods in the {@link OpenGLMatrix}
* class and the Orientation class. See, for example, {@link OpenGLMatrix#translation(float,
* float, float)}, {@link OpenGLMatrix#rotation(AngleUnit, float, float, float, float)}, and
* {@link Orientation#getRotationMatrix(AxesReference, AxesOrder, AngleUnit, float, float, float)}.
* Related methods in {@link OpenGLMatrix}, such as {@link OpenGLMatrix#rotated(AngleUnit,
* float, float, float, float)}, are syntactic shorthands for creating a new transform and
* then immediately multiplying the receiver by it, which can be convenient at times.</li>
*
* <li>If you want to break open the black box of a transformation matrix to understand
* what it's doing inside, use {@link MatrixF#getTranslation()} to fetch how much the
* transform will move you in x, y, and z, and use {@link Orientation#getOrientation(MatrixF,
* AxesReference, AxesOrder, AngleUnit)} to determine the rotational motion that the transform
* will impart. See {@link #format(OpenGLMatrix)} below for an example.</li>
*
* </ol>
*
* This example places the "stones" image on the perimeter wall to the Left
* of the Red Driver station wall. Similar to the Red Beacon Location on the Res-Q
*
* This example places the "chips" image on the perimeter wall to the Right
* of the Blue Driver station. Similar to the Blue Beacon Location on the Res-Q
*
* See the doc folder of this project for a description of the Field Coordinate System
* conventions.
*
* Initially the target is conceptually lying at the origin of the Field Coordinate System
* (the center of the field), facing up.
*
* In this configuration, the target's coordinate system aligns with that of the field.
*
* In a real situation we'd also account for the vertical (Z) offset of the target,
* but for simplicity, we ignore that here; for a real robot, you'll want to fix that.
*
* To place the Stones Target on the Red Audience wall:
* - First we rotate it 90 around the field's X axis to flip it upright
* - Then we rotate it 90 around the field's Z axis to face it away from the audience.
* - Finally, we translate it back along the X axis towards the red audience wall.
*/
OpenGLMatrix redTargetLocationOnField = OpenGLMatrix
/* Then we translate the target off to the RED WALL. Our translation here
is a negative translation in X.*/
.translation(-mmFTCFieldWidth/2, 0, 0)
.multiplied(Orientation.getRotationMatrix(
/* First, in the fixed (field) coordinate system, we rotate 90deg in X, then 90 in Z */
AxesReference.EXTRINSIC, AxesOrder.XZX,
AngleUnit.DEGREES, 90, 90, 0));
redTarget.setLocationFtcFieldFromTarget(redTargetLocationOnField);
RobotLog.ii(TAG, "Red Target=%s", format(redTargetLocationOnField));

/*
* To place the Chips Target on the Blue Audience wall:
* - First we rotate it 90 around the field's X axis to flip it upright
* - Finally, we translate it along the Y axis towards the blue audience wall.
*/
OpenGLMatrix blueTargetLocationOnField = OpenGLMatrix
/* Then we translate the target off to the Blue Audience wall.
Our translation here is a positive translation in Y.*/
.translation(0, mmFTCFieldWidth/2, 0)
.multiplied(Orientation.getRotationMatrix(
/* First, in the fixed (field) coordinate system, we rotate 90deg in X */
AxesReference.EXTRINSIC, AxesOrder.XZX,
AngleUnit.DEGREES, 90, 0, 0));
blueTarget.setLocationFtcFieldFromTarget(blueTargetLocationOnField);
RobotLog.ii(TAG, "Blue Target=%s", format(blueTargetLocationOnField));

/**
* We also need to tell Vuforia where the <em>cameras</em> are relative to the robot.
*
* Just as there is a Field Coordinate System, so too there is a Robot Coordinate System.
* The two share many similarities. The origin of the Robot Coordinate System is wherever
* you choose to make it on the robot, but typically you'd choose somewhere in the middle
* of the robot. From that origin, the Y axis is horizontal and positive out towards the
* "front" of the robot (however you choose "front" to be defined), the X axis is horizontal
* and positive out towards the "right" of the robot (i.e.: 90deg horizontally clockwise from
* the positive Y axis), and the Z axis is vertical towards the sky.
*
* Similarly, for each camera there is a Camera Coordinate System. The origin of a Camera
* Coordinate System lies in the middle of the sensor inside of the camera. The Z axis is
* positive coming out of the lens of the camera in a direction perpendicular to the plane
* of the sensor. When looking at the face of the lens of the camera (down the positive Z
* axis), the X axis is positive off to the right in the plane of the sensor, and the Y axis
* is positive out the top of the lens in the plane of the sensor at 90 degrees horizontally
* counter-clockwise from the X axis.
*
* Next, there is a Phone Coordinate System (for robots that have phones, of course), though
* with the advent of Vuforia support for Webcams, this coordinate system is less significant
* than it was previously. The Phone Coordinate System is defined thusly: with the phone held
* flat in front of you in portrait mode (i.e. as it is when running the robot controller app)
* and you are staring straight at the face of the phone,
* * X is positive heading off to your right,
* * Y is positive heading up through the top edge of the phone, and
* * Z is pointing out of the screen, toward you.
* The origin of the Phone Coordinate System is at the origin of the Camera Coordinate System
* of the front-facing camera on the phone.
*
* Finally, it is worth noting that trackable Vuforia Image Targets have their <em>own</em>
* coordinate system (see {@link VuforiaTrackable}). This is sometimes referred to as the
* Target Coordinate System. In keeping with the above, when looking at the target in its
* natural orientation, in the Target Coordinate System
* * X is positive heading off to your right,
* * Y is positive heading up through the top edge of the target, and
* * Z is pointing out of the target, toward you.
*
* One can observe that the Camera Coordinate System of the front-facing camera on a phone
* coincides with the Phone Coordinate System. Further, when a phone is placed on its back
* at the origin of the Robot Coordinate System and aligned appropriately, those coordinate
* systems also coincide with the Robot Coordinate System. Got it?
*
* In this example here, we're going to assume that we put the camera on the right side
* of the robot (facing outwards, of course). To determine the transformation matrix that
* describes that location, first consider the camera as lying on its back at the origin
* of the Robot Coordinate System such that the Camera Coordinate System and Robot Coordinate
* System coincide. Then the transformation we need is
* * first a rotation of the camera by +90deg along the robot X axis,
* * then a rotation of the camera by +90deg along the robot Z axis, and
* * finally a translation of the camera to the side of the robot.
*
* When determining whether a rotation is positive or negative, consider yourself as looking
* down the (positive) axis of rotation from the positive towards the origin. Positive rotations
* are then CCW, and negative rotations CW. An example: consider looking down the positive Z
* axis towards the origin. A positive rotation about Z (ie: a rotation parallel to the X-Y
* plane) is then CCW, as one would normally expect from the usual classic 2D geometry.
*/

OpenGLMatrix robotFromCamera = OpenGLMatrix
.translation(mmBotWidth/2,0,0)
.multiplied(Orientation.getRotationMatrix(
AxesReference.EXTRINSIC, AxesOrder.XZY,
AngleUnit.DEGREES, 90, 90, 0));
RobotLog.ii(TAG, "camera=%s", format(robotFromCamera));

/**
* Let the trackable listeners we care about know where the camera is. We know that each
* listener is a {@link VuforiaTrackableDefaultListener} and so can safely cast because
* we have not ourselves installed a listener of a different type.
*/
((VuforiaTrackableDefaultListener)redTarget.getListener()).setCameraLocationOnRobot(parameters.cameraName, robotFromCamera);
((VuforiaTrackableDefaultListener)blueTarget.getListener()).setCameraLocationOnRobot(parameters.cameraName, robotFromCamera);

/**
* A brief tutorial: here's how all the math is going to work:
*
* C = robotFromCamera maps camera coords -> robot coords
* P = tracker.getPose() maps image target coords -> camera coords
* L = redTargetLocationOnField maps image target coords -> field coords
*
* So
*
* C.inverted() maps robot coords -> camera coords
* P.inverted() maps camera coords -> imageTarget coords
*
* Putting that all together,
*
* L x P.inverted() x C.inverted() maps robot coords to field coords.
*
* @see VuforiaTrackableDefaultListener#getRobotLocation()
*/

/** Wait for the game to begin */
telemetry.addData(">", "Press Play to start tracking");
telemetry.update();
waitForStart();

/** Start tracking the data sets we care about. */
stonesAndChips.activate();

boolean buttonPressed = false;
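// buttonPressed tracks the previous state of the A button, so that a held button captures
// only one frame per press (simple rising-edge detection in the loop below).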

while (opModeIsActive()) {

if (gamepad1.a && !buttonPressed) {
captureFrameToFile();
}
buttonPressed = gamepad1.a;

for (VuforiaTrackable trackable : allTrackables) {
/**
* getUpdatedRobotLocation() will return null if no new information is available since
* the last time that call was made, or if the trackable is not currently visible.
* getRobotLocation() will return null if the trackable is not currently visible.
*/
telemetry.addData(trackable.getName(), ((VuforiaTrackableDefaultListener)trackable.getListener()).isVisible() ? "Visible" : "Not Visible");

OpenGLMatrix robotLocationTransform = ((VuforiaTrackableDefaultListener)trackable.getListener()).getUpdatedRobotLocation();
if (robotLocationTransform != null) {
lastLocation = robotLocationTransform;
}
}
/**
* Provide feedback as to where the robot was last located (if we know).
*/
if (lastLocation != null) {
// RobotLog.vv(TAG, "robot=%s", format(lastLocation));
telemetry.addData("Pos", format(lastLocation));
} else {
telemetry.addData("Pos", "Unknown");
}
telemetry.update();
}
}

/**
* A simple utility that extracts positioning information from a transformation matrix
* and formats it in a form palatable to a human being.
*/
String format(OpenGLMatrix transformationMatrix) {
return transformationMatrix.formatAsTransform();
}

/**
* Sample one frame from the Vuforia stream and write it to a .PNG image file on the robot
* controller in the /sdcard/FIRST/data directory. The images can be downloaded using Android
* Studio's Device File Explorer, ADB, or the Media Transfer Protocol (MTP) integration into
* Windows Explorer, among other means. The images can be useful during robot design and calibration
* in order to get a sense of what the camera is actually seeing and so assist in camera
* aiming and alignment.
*/
void captureFrameToFile() {
vuforia.getFrameOnce(Continuation.create(ThreadPool.getDefault(), new Consumer<Frame>()
{
@Override public void accept(Frame frame)
{
Bitmap bitmap = vuforia.convertFrameToBitmap(frame);
if (bitmap != null) {
File file = new File(captureDirectory, String.format(Locale.getDefault(), "VuforiaFrame-%d.png", captureCounter++));
try {
FileOutputStream outputStream = new FileOutputStream(file);
try {
bitmap.compress(Bitmap.CompressFormat.PNG, 100, outputStream);
} finally {
outputStream.close();
telemetry.log().add("captured %s", file.getName());
}
} catch (IOException e) {
RobotLog.ee(TAG, e, "exception in captureFrameToFile()");
}
}
}
}));
}
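// For reference, captured frames can be copied off the Robot Controller with ADB, e.g.:
//     adb pull /sdcard/FIRST/data
// (assumes adb is installed and the device is connected; MTP or Android Studio's
// Device File Explorer work as well, as noted in the javadoc above)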
}
Binary file not shown.
@ -40,7 +40,6 @@ import android.content.Intent;
import android.content.ServiceConnection;
import android.content.SharedPreferences;
import android.content.res.Configuration;
import android.content.res.Resources;
import android.hardware.usb.UsbDevice;
import android.hardware.usb.UsbManager;
import android.net.wifi.WifiManager;
@ -89,6 +88,7 @@ import com.qualcomm.robotcore.hardware.configuration.LynxConstants;
import com.qualcomm.robotcore.hardware.configuration.Utility;
import com.qualcomm.robotcore.robot.Robot;
import com.qualcomm.robotcore.robot.RobotState;
import com.qualcomm.robotcore.util.ClockWarningSource;
import com.qualcomm.robotcore.util.Device;
import com.qualcomm.robotcore.util.Dimmer;
import com.qualcomm.robotcore.util.ImmersiveMode;
@ -99,8 +99,10 @@ import com.qualcomm.robotcore.wifi.NetworkConnectionFactory;
import com.qualcomm.robotcore.wifi.NetworkType;

import org.firstinspires.ftc.ftccommon.external.SoundPlayingRobotMonitor;
import org.firstinspires.ftc.ftccommon.internal.AnnotatedHooksClassFilter;
import org.firstinspires.ftc.ftccommon.internal.FtcRobotControllerWatchdogService;
import org.firstinspires.ftc.ftccommon.internal.ProgramAndManageActivity;
import org.firstinspires.ftc.onbotjava.ExternalLibraries;
import org.firstinspires.ftc.onbotjava.OnBotJavaHelperImpl;
import org.firstinspires.ftc.onbotjava.OnBotJavaProgrammingMode;
import org.firstinspires.ftc.robotcore.external.navigation.MotionDetection;
@ -112,6 +114,7 @@ import org.firstinspires.ftc.robotcore.internal.network.WifiDirectChannelChanger
import org.firstinspires.ftc.robotcore.internal.network.WifiMuteEvent;
import org.firstinspires.ftc.robotcore.internal.network.WifiMuteStateMachine;
import org.firstinspires.ftc.robotcore.internal.opmode.ClassManager;
import org.firstinspires.ftc.robotcore.internal.opmode.OnBotJavaHelper;
import org.firstinspires.ftc.robotcore.internal.system.AppAliveNotifier;
import org.firstinspires.ftc.robotcore.internal.system.AppUtil;
import org.firstinspires.ftc.robotcore.internal.system.Assert;
@ -122,7 +125,10 @@ import org.firstinspires.ftc.robotcore.internal.ui.UILocation;
import org.firstinspires.ftc.robotcore.internal.webserver.RobotControllerWebInfo;
import org.firstinspires.ftc.robotserver.internal.programmingmode.ProgrammingModeManager;
import org.firstinspires.inspection.RcInspectionActivity;
import org.threeten.bp.YearMonth;
import org.xmlpull.v1.XmlPullParserException;

import java.io.FileNotFoundException;
import java.util.List;
import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;
@ -139,6 +145,8 @@ public class FtcRobotControllerActivity extends Activity
  protected WifiManager.WifiLock wifiLock;
  protected RobotConfigFileManager cfgFileMgr;

  private OnBotJavaHelper onBotJavaHelper;

  protected ProgrammingModeManager programmingModeManager;

  protected UpdateUI.Callback callback;
@ -298,6 +306,18 @@ public class FtcRobotControllerActivity extends Activity
    preferencesHelper.writeBooleanPrefIfDifferent(context.getString(R.string.pref_rc_connected), true);
    preferencesHelper.getSharedPreferences().registerOnSharedPreferenceChangeListener(sharedPreferencesListener);

    // Check if this RC app is from a later FTC season than what was installed previously
    int ftcSeasonYearOfPreviouslyInstalledRc = preferencesHelper.readInt(getString(R.string.pref_ftc_season_year_of_current_rc), 0);
    int ftcSeasonYearOfCurrentlyInstalledRc = AppUtil.getInstance().getFtcSeasonYear(YearMonth.now()).getValue();
    if (ftcSeasonYearOfCurrentlyInstalledRc > ftcSeasonYearOfPreviouslyInstalledRc) {
      preferencesHelper.writeIntPrefIfDifferent(getString(R.string.pref_ftc_season_year_of_current_rc), ftcSeasonYearOfCurrentlyInstalledRc);
      // Since it's a new FTC season, we should reset certain settings back to their default values.
      preferencesHelper.writeBooleanPrefIfDifferent(getString(R.string.pref_warn_about_2_4_ghz_band), true);
      preferencesHelper.writeBooleanPrefIfDifferent(getString(R.string.pref_warn_about_obsolete_software), true);
      preferencesHelper.writeBooleanPrefIfDifferent(getString(R.string.pref_warn_about_mismatched_app_versions), true);
      preferencesHelper.writeBooleanPrefIfDifferent(getString(R.string.pref_warn_about_incorrect_clocks), true);
    }
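    // Note (assumption, not stated in this diff): getFtcSeasonYear() appears to map
    // the current YearMonth to the starting year of that FTC season, so this reset
    // fires at most once per season rollover.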

    entireScreenLayout = (LinearLayout) findViewById(R.id.entire_screen);
    buttonMenu = (ImageButton) findViewById(R.id.menu_buttons);
    buttonMenu.setOnClickListener(new View.OnClickListener() {
@ -311,6 +331,8 @@ public class FtcRobotControllerActivity extends Activity
          }
        });
        popupMenu.inflate(R.menu.ftc_robot_controller);
        AnnotatedHooksClassFilter.getInstance().callOnCreateMenuMethods(
            FtcRobotControllerActivity.this, popupMenu.getMenu());
        popupMenu.show();
      }
    });
@ -319,6 +341,9 @@ public class FtcRobotControllerActivity extends Activity

    BlocksOpMode.setActivityAndWebView(this, (WebView) findViewById(R.id.webViewBlocksRuntime));

    ExternalLibraries.getInstance().onCreate();
    onBotJavaHelper = new OnBotJavaHelperImpl();

    /*
     * Paranoia as the ClassManagerFactory requires EXTERNAL_STORAGE permissions
     * and we've seen on the DS where the finish() call above does not short-circuit
@ -326,7 +351,7 @@ public class FtcRobotControllerActivity extends Activity
     * have permissions. So...
     */
    if (permissionsValidated) {
      ClassManager.getInstance().setOnBotJavaClassHelper(new OnBotJavaHelperImpl());
      ClassManager.getInstance().setOnBotJavaClassHelper(onBotJavaHelper);
      ClassManagerFactory.registerFilters();
      ClassManagerFactory.processAllClasses();
    }
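
    // The helper created above is now kept in the onBotJavaHelper field so the same
    // instance can later be handed to the controller service via setOnBotJavaHelper()
    // (see further down in this diff).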
@ -379,10 +404,12 @@ public class FtcRobotControllerActivity extends Activity
      initWifiMute(true);
    }

    FtcAboutActivity.setBuildTimeFromBuildConfig(BuildConfig.BUILD_TIME);
    FtcAboutActivity.setBuildTimeFromBuildConfig(BuildConfig.APP_BUILD_TIME);

    // check to see if there is a preferred Wi-Fi to use.
    checkPreferredChannel();

    AnnotatedHooksClassFilter.getInstance().callOnCreateMethods(this);
  }

  protected UpdateUI createUpdateUI() {
@ -417,6 +444,9 @@ public class FtcRobotControllerActivity extends Activity
  protected void onResume() {
    super.onResume();
    RobotLog.vv(TAG, "onResume()");

    // In case the user just got back from fixing their clock, refresh ClockWarningSource
    ClockWarningSource.getInstance().onPossibleRcClockUpdate();
  }

  @Override
@ -451,6 +481,8 @@ public class FtcRobotControllerActivity extends Activity
    if (preferencesHelper != null) preferencesHelper.getSharedPreferences().unregisterOnSharedPreferenceChangeListener(sharedPreferencesListener);

    RobotLog.cancelWriteLogcatToDisk();

    AnnotatedHooksClassFilter.getInstance().callOnDestroyMethods(this);
  }

  protected void bindToService() {
@ -481,18 +513,8 @@ public class FtcRobotControllerActivity extends Activity
  }

  protected void readNetworkType() {

    // The code here used to defer to the value found in a configuration file
    // to configure the network type. If the file was absent, then it initialized
    // it with a default.
    //
    // However, bugs have been reported with that approach (empty config files, specifically).
    // Moreover, the non-Wifi-Direct networking is end-of-life, so the simplest and most robust
    // (e.g.: no one can screw things up by messing with the contents of the config file) fix is
    // to do away with configuration file entirely.
    //
    // Control hubs are always running the access point model. Everything else, for the time
    // being always runs the wifi direct model.
    // being always runs the Wi-Fi Direct model.
    if (Device.isRevControlHub() == true) {
      networkType = NetworkType.RCWIRELESSAP;
    } else {
@ -516,6 +538,7 @@ public class FtcRobotControllerActivity extends Activity
  @Override
  public boolean onCreateOptionsMenu(Menu menu) {
    getMenuInflater().inflate(R.menu.ftc_robot_controller, menu);
    AnnotatedHooksClassFilter.getInstance().callOnCreateMenuMethods(this, menu);
    return true;
  }

@ -660,6 +683,8 @@ public class FtcRobotControllerActivity extends Activity
    controllerService = service;
    updateUI.setControllerService(controllerService);

    controllerService.setOnBotJavaHelper(onBotJavaHelper);

    updateUIAndRequestRobotSetup();
    programmingModeManager.setState(new FtcRobotControllerServiceState() {
      @NonNull
@ -668,11 +693,20 @@ public class FtcRobotControllerActivity extends Activity
        return service.getWebServer();
      }

      @Nullable
      @Override
      public OnBotJavaHelper getOnBotJavaHelper() {
        return service.getOnBotJavaHelper();
      }

      @Override
      public EventLoopManager getEventLoopManager() {
        return service.getRobot().eventLoopManager;
      }
    });

    AnnotatedHooksClassFilter.getInstance().callWebHandlerRegistrarMethods(this,
        service.getWebServer().getWebHandlerManager());
  }

  private void updateUIAndRequestRobotSetup() {
@ -697,10 +731,15 @@ public class FtcRobotControllerActivity extends Activity
    HardwareFactory hardwareFactory = new HardwareFactory(context);
    try {
      hardwareFactory.setXmlPullParser(file.getXml());
    } catch (Resources.NotFoundException e) {
    } catch (FileNotFoundException | XmlPullParserException e) {
      RobotLog.ww(TAG, e, "Unable to set configuration file %s. Falling back on noConfig.", file.getName());
      file = RobotConfigFile.noConfig(cfgFileMgr);
      hardwareFactory.setXmlPullParser(file.getXml());
      cfgFileMgr.setActiveConfigAndUpdateUI(false, file);
      try {
        hardwareFactory.setXmlPullParser(file.getXml());
        cfgFileMgr.setActiveConfigAndUpdateUI(false, file);
      } catch (FileNotFoundException | XmlPullParserException e1) {
        RobotLog.ee(TAG, e1, "Failed to fall back on noConfig");
      }
    }
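    // The nested try exists because getXml() on the noConfig fallback declares the
    // same checked exceptions; if even the fallback fails, there is nothing left to
    // fall back on, so the failure is only logged.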

    OpModeRegister userOpModeRegister = createOpModeRegister();
@ -712,6 +751,8 @@ public class FtcRobotControllerActivity extends Activity

    passReceivedUsbAttachmentsToEventLoop();
    AndroidBoard.showErrorIfUnknownControlHub();

    AnnotatedHooksClassFilter.getInstance().callOnCreateEventLoopMethods(this, eventLoop);
  }

  protected OpModeRegister createOpModeRegister() {

@ -65,6 +65,8 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
        <item>@style/AppThemeTealRC</item>
    </integer-array>
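
    <!-- New in v7.0: records the FTC season year of the most recently run RC app so
         a later season's install can reset certain warning preferences back to their
         defaults (see the season-year check in FtcRobotControllerActivity above). -->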
    <string name="pref_ftc_season_year_of_current_rc">pref_ftc_season_year_of_current_rc</string>

    <string name="packageName">@string/packageNameRobotController</string>

</resources>