Updated ftclib.

Fixed a NullPointerException in FtcTest when trying to display ExposureSettings, which is only available for VisionPortal and not in RawColorBlob tuning.
Added Limelight 3A support.
Removed TensorFlow references.
This commit is contained in:
Titan Robotics Club
2024-09-11 19:16:39 -07:00
parent 5ee9bba8a0
commit fa32f5ba34
5 changed files with 76 additions and 16 deletions

View File

@ -1065,7 +1065,12 @@ public class FtcTest extends FtcTeleOp
{
int lineNum = 9;
robot.vision.displayExposureSettings(lineNum++);
if (robot.vision.vision != null)
{
// displayExposureSettings is only available for VisionPortal.
robot.vision.displayExposureSettings(lineNum++);
}
if (robot.vision.rawColorBlobVision != null)
{
robot.vision.getDetectedRawColorBlob(lineNum++);

View File

@ -85,7 +85,8 @@ public class Robot
if (RobotParams.Preferences.useVision &&
(RobotParams.Preferences.tuneColorBlobVision ||
RobotParams.Preferences.useAprilTagVision ||
RobotParams.Preferences.useColorBlobVision))
RobotParams.Preferences.useColorBlobVision ||
RobotParams.Preferences.useLimelightVision))
{
vision = new Vision(this);
}

View File

@ -35,6 +35,7 @@ import trclib.dataprocessor.TrcUtil;
import trclib.drivebase.TrcDriveBase.DriveOrientation;
import trclib.driverio.TrcGameController.DriveMode;
import trclib.pathdrive.TrcPose2D;
import trclib.pathdrive.TrcPose3D;
import trclib.robotcore.TrcPidController.PidCoefficients;
import trclib.vision.TrcHomographyMapper;
@ -146,6 +147,7 @@ public class RobotParams
public static final boolean tuneColorBlobVision = false;
public static final boolean useAprilTagVision = false;
public static final boolean useColorBlobVision = false;
public static final boolean useLimelightVision = false;
public static final boolean showVisionView = !inCompetition;
public static final boolean showVisionStat = false;
// Drive Base
@ -175,7 +177,7 @@ public class RobotParams
camPitch = 15.0; // degrees down from horizontal
camYaw = 0.0; // degrees clockwise from robot front
camRoll = 0.0;
camPose = new TrcPose2D(camXOffset, camYOffset, camYaw);
camPose = new TrcPose3D(camXOffset, camYOffset, camZOffset, camYaw, camPitch, camRoll);
camOrientation = OpenCvCameraRotation.UPRIGHT;
// Homography: cameraRect in pixels, worldRect in inches
cameraRect = new TrcHomographyMapper.Rectangle(
@ -192,7 +194,7 @@ public class RobotParams
} //class FrontCamParams
/**
* This class contains the parameters of the front camera.
* This class contains the parameters of the back camera.
*/
public static class BackCamParams extends FtcRobotDrive.VisionInfo
{
@ -207,7 +209,7 @@ public class RobotParams
camPitch = 15.0; // degrees down from horizontal
camYaw = 0.0; // degrees clockwise from robot front
camRoll = 0.0;
camPose = new TrcPose2D(camXOffset, camYOffset, camYaw);
camPose = new TrcPose3D(camXOffset, camYOffset, camZOffset, camYaw, camPitch, camRoll);
camOrientation = OpenCvCameraRotation.UPRIGHT;
// Homography: cameraRect in pixels, worldRect in inches
cameraRect = new TrcHomographyMapper.Rectangle(
@ -223,6 +225,38 @@ public class RobotParams
} //BackCamParams
} //class BackCamParams
/**
* This class contains the parameters of the Limelight vision processor.
* It extends FtcRobotDrive.VisionInfo and populates the inherited camera fields
* (mount offsets/angles, 3D pose, image size and homography rectangles) for the
* Limelight 3A camera. All values are robot-specific calibration constants.
*/
public static class LimelightParams extends FtcRobotDrive.VisionInfo
{
public LimelightParams()
{
// Hardware-map device name of the Limelight 3A.
camName = "limelight3a";
camImageWidth = 640;
camImageHeight = 480;
camXOffset = 0.0; // Inches to the right from robot center
camYOffset = 2.0; // Inches forward from robot center
camZOffset = 9.75; // Inches up from the floor
camPitch = 15.0; // degrees down from horizontal
camYaw = 0.0; // degrees clockwise from robot front
camRoll = 0.0;
// Full 3D camera pose assembled from the offsets/angles above.
camPose = new TrcPose3D(camXOffset, camYOffset, camZOffset, camYaw, camPitch, camRoll);
camOrientation = OpenCvCameraRotation.UPRIGHT;
// Homography: cameraRect in pixels, worldRect in inches
// Maps image-plane pixel corners to floor-plane world coordinates
// (world Y is measured from the robot front: field distance minus
// half the robot length minus the camera's forward offset).
cameraRect = new TrcHomographyMapper.Rectangle(
0.0, 120.0, // Camera Top Left
camImageWidth - 1, 120.0, // Camera Top Right
0.0, camImageHeight - 1, // Camera Bottom Left
camImageWidth - 1, camImageHeight - 1); // Camera Bottom Right
// NOTE(review): corner X values are asymmetric (-12.5626 vs 11.4375 top,
// -2.5625 vs 2.5626 bottom) — presumably measured calibration values,
// but confirm the top-left -12.5626 is not a typo for -11.4375.
worldRect = new TrcHomographyMapper.Rectangle(
-12.5626, 48.0 - Robot.ROBOT_LENGTH/2.0 - camYOffset, // World Top Left
11.4375, 44.75 - Robot.ROBOT_LENGTH/2.0 - camYOffset, // World Top Right
-2.5625, 21.0 - Robot.ROBOT_LENGTH/2.0 - camYOffset, // World Bottom Left
2.5626, 21.0 - Robot.ROBOT_LENGTH/2.0 - camYOffset); // World Bottom Right
} //LimelightParams
} //class LimelightParams
public static class VisionOnlyParams extends FtcRobotDrive.RobotInfo
{
public VisionOnlyParams()
@ -232,6 +266,7 @@ public class RobotParams
webCam1 = new FrontCamParams();
// Back Camera
webCam2 = new BackCamParams();
limelight = new LimelightParams();
} //VisionOnlyParams
} //class VisionOnlyParams
@ -294,6 +329,7 @@ public class RobotParams
// Vision
webCam1 = new FrontCamParams();
webCam2 = new BackCamParams();
limelight = new LimelightParams();
// Miscellaneous
blinkinName = "blinkin";
} //DifferentialParams
@ -362,6 +398,7 @@ public class RobotParams
// Vision
webCam1 = new FrontCamParams();
webCam2 = new BackCamParams();
limelight = new LimelightParams();
// Miscellaneous
blinkinName = "blinkin";
} //MecanumParams
@ -427,6 +464,7 @@ public class RobotParams
// Vision
webCam1 = new FrontCamParams();
webCam2 = new BackCamParams();
limelight = new LimelightParams();
// Miscellaneous
blinkinName = "blinkin";
// Steer Encoders

View File

@ -22,6 +22,8 @@
package teamcode.vision;
import com.qualcomm.hardware.limelightvision.LLResult;
import org.firstinspires.ftc.robotcore.external.hardware.camera.BuiltinCameraDirection;
import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.robotcore.external.navigation.AngleUnit;
@ -36,6 +38,7 @@ import java.util.ArrayList;
import ftclib.robotcore.FtcOpMode;
import ftclib.vision.FtcEocvColorBlobProcessor;
import ftclib.vision.FtcLimelightVision;
import ftclib.vision.FtcRawEocvColorBlobPipeline;
import ftclib.vision.FtcRawEocvVision;
import ftclib.vision.FtcVision;
@ -51,9 +54,9 @@ import trclib.vision.TrcOpenCvDetector;
import trclib.vision.TrcVisionTargetInfo;
/**
* This class implements AprilTag/Eocv Vision for the game season. It creates and initializes all the vision target
* info as well as providing info for the robot, camera and the field. It also provides methods to get the location
* of the robot and detected targets.
* This class implements AprilTag/Eocv/Limelight Vision for the game season. It creates and initializes all the vision
* target info as well as providing info for the robot, camera and the field. It also provides methods to get the
* location of the robot and detected targets.
*/
public class Vision
{
@ -75,11 +78,6 @@ public class Vision
.setVerticesRange(0.0, 1000.0)
.setAspectRatioRange(0.3, 1.0);
private static final String TFOD_MODEL_ASSET = "MyObject.tflite";
private static final float TFOD_MIN_CONFIDENCE = 0.75f;
public static final String TFOD_OBJECT_LABEL = "MyObject";
public static final String[] TFOD_TARGET_LABELS = {TFOD_OBJECT_LABEL};
private final TrcDbgTrace tracer;
private final Robot robot;
private final WebcamName webcam1, webcam2;
@ -92,6 +90,7 @@ public class Vision
public FtcVisionEocvColorBlob blueBlobVision;
private FtcEocvColorBlobProcessor blueBlobProcessor;
public FtcVision vision;
public FtcLimelightVision limelightVision;
/**
* Constructor: Create an instance of the object.
@ -142,6 +141,11 @@ public class Vision
rawColorBlobVision.setFpsMeterEnabled(RobotParams.Preferences.showVisionStat);
setRawColorBlobVisionEnabled(false);
}
else if (RobotParams.Preferences.useLimelightVision)
{
limelightVision = new FtcLimelightVision(
robot.robotInfo.limelight.camName, robot.robotInfo.limelight.camPose, this::getTargetGroundOffset);
}
else
{
// Creating Vision Processors for VisionPortal.
@ -315,7 +319,6 @@ public class Vision
}
} //setRawColorBlobThresholds
/**
* This method enables/disables raw ColorBlob vision.
*
@ -423,7 +426,9 @@ public class Vision
TrcPose2D aprilTagPose =
RobotParams.Game.APRILTAG_POSES[aprilTagInfo.detectedObj.aprilTagDetection.id - 1];
TrcPose2D cameraPose = aprilTagPose.subtractRelativePose(aprilTagInfo.objPose);
robotPose = cameraPose.subtractRelativePose(robot.robotInfo.webCam1.camPose);
robotPose = cameraPose.subtractRelativePose(
new TrcPose2D(robot.robotInfo.webCam1.camXOffset, robot.robotInfo.webCam1.camYOffset,
robot.robotInfo.webCam1.camYaw));
tracer.traceInfo(
moduleName,
"AprilTagId=" + aprilTagInfo.detectedObj.aprilTagDetection.id +
@ -556,6 +561,17 @@ public class Vision
return colorBlobInfo;
} //getDetectedBlueBlob
/**
* This method returns the target Z offset from ground, used by FtcLimelightVision
* as the ground-offset callback when solving for target location.
*
* @param result specifies the detected object (currently unused).
* @return target ground offset in inches; currently always 0.0 — presumably a
*         placeholder until season-specific target heights are known (TODO confirm).
*/
private double getTargetGroundOffset(LLResult result)
{
    final double groundOffsetInches = 0.0;
    return groundOffsetInches;
} //getTargetGroundOffset
/**
* This method is called by the Arrays.sort to sort the target object by increasing distance.
*