Added support for CameraStreamProcessor.
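
This adds a CameraStreamProcessor that implements both VisionProcessor and CameraStreamSource, caching the most recent camera frame as a Bitmap so it can be handed to a camera stream consumer. It also adds a useCameraStreamProcessor preference to RobotParams and, when that preference is enabled, registers the processor in the VisionPortal processor list built by Vision.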

Titan Robotics Club
2024-10-10 16:46:24 -07:00
parent 06d0d29c59
commit 7612f65b58
3 changed files with 61 additions and 2 deletions

@@ -148,9 +148,10 @@ public class RobotParams
public static final boolean useWebCam = false; // false to use Android phone camera.
public static final boolean useBuiltinCamBack = false; // For Android Phone as Robot Controller.
public static final boolean tuneColorBlobVision = false;
public static final boolean useLimelightVision = false;
public static final boolean useCameraStreamProcessor = false;
public static final boolean useAprilTagVision = false;
public static final boolean useColorBlobVision = false;
public static final boolean useLimelightVision = false;
public static final boolean showVisionView = !inCompetition;
public static final boolean showVisionStat = false;
// Drive Base

@@ -0,0 +1,49 @@
package teamcode.vision;

import android.graphics.Bitmap;
import android.graphics.Canvas;

import org.firstinspires.ftc.robotcore.external.function.Consumer;
import org.firstinspires.ftc.robotcore.external.function.Continuation;
import org.firstinspires.ftc.robotcore.external.stream.CameraStreamSource;
import org.firstinspires.ftc.robotcore.internal.camera.calibration.CameraCalibration;
import org.firstinspires.ftc.vision.VisionProcessor;
import org.opencv.android.Utils;
import org.opencv.core.Mat;

import java.util.concurrent.atomic.AtomicReference;

public class CameraStreamProcessor implements VisionProcessor, CameraStreamSource
{
    private final AtomicReference<Bitmap> lastFrame =
        new AtomicReference<>(Bitmap.createBitmap(1, 1, Bitmap.Config.RGB_565));

    @Override
    public void init(int width, int height, CameraCalibration calibration)
    {
        lastFrame.set(Bitmap.createBitmap(width, height, Bitmap.Config.RGB_565));
    }

    @Override
    public Object processFrame(Mat frame, long captureTimeNanos)
    {
        Bitmap b = Bitmap.createBitmap(frame.width(), frame.height(), Bitmap.Config.RGB_565);
        Utils.matToBitmap(frame, b);
        lastFrame.set(b);
        return null;
    }

    @Override
    public void onDrawFrame(
        Canvas canvas, int onscreenWidth, int onscreenHeight, float scaleBmpPxToCanvasPx, float scaleCanvasDensity,
        Object userContext)
    {
        // do nothing
    }

    @Override
    public void getFrameBitmap(Continuation<? extends Consumer<Bitmap>> continuation)
    {
        continuation.dispatch(bitmapConsumer -> bitmapConsumer.accept(lastFrame.get()));
    }
}
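
For context, the sketch below (not part of this commit) shows one way a processor like this could be attached to a VisionPortal and mirrored to the FTC Dashboard, in the spirit of the commented-out startCameraStream call in the Vision changes further down. The wrapper class CameraStreamExample, its startStreaming method, and the webcam name "Webcam 1" are illustrative assumptions.

import com.acmerobotics.dashboard.FtcDashboard;
import com.qualcomm.robotcore.hardware.HardwareMap;

import org.firstinspires.ftc.robotcore.external.hardware.camera.WebcamName;
import org.firstinspires.ftc.vision.VisionPortal;

import teamcode.vision.CameraStreamProcessor;

public class CameraStreamExample
{
    /**
     * Builds a VisionPortal with a CameraStreamProcessor attached and mirrors its
     * cached frames to the FTC Dashboard camera stream. (Illustrative sketch only.)
     */
    public static VisionPortal startStreaming(HardwareMap hardwareMap)
    {
        CameraStreamProcessor streamProcessor = new CameraStreamProcessor();
        // Register the processor so the portal feeds it frames via processFrame.
        VisionPortal portal = new VisionPortal.Builder()
            .setCamera(hardwareMap.get(WebcamName.class, "Webcam 1"))
            .addProcessor(streamProcessor)
            .build();
        // The processor also implements CameraStreamSource, so it can back the
        // dashboard stream directly (0 = no frame-rate cap, matching the
        // commented-out call in Vision.java).
        FtcDashboard.getInstance().startCameraStream(streamProcessor, 0);
        return portal;
    }
}

Because the latest frame is held in an AtomicReference, getFrameBitmap can be serviced from any thread without additional locking.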

@@ -89,6 +89,8 @@ public class Vision
private final WebcamName webcam1, webcam2;
private FtcRawEocvColorBlobPipeline rawColorBlobPipeline;
public FtcRawEocvVision rawColorBlobVision;
public FtcLimelightVision limelightVision;
public CameraStreamProcessor cameraStreamProcessor;
public FtcVisionAprilTag aprilTagVision;
private AprilTagProcessor aprilTagProcessor;
public FtcVisionEocvColorBlob redBlobVision;
@@ -96,7 +98,6 @@ public class Vision
public FtcVisionEocvColorBlob blueBlobVision;
private FtcEocvColorBlobProcessor blueBlobProcessor;
public FtcVision vision;
public FtcLimelightVision limelightVision;
/**
* Constructor: Create an instance of the object.
@@ -156,6 +157,14 @@ public class Vision
}
// Creating Vision Processors for VisionPortal.
ArrayList<VisionProcessor> visionProcessorsList = new ArrayList<>();
if (RobotParams.Preferences.useCameraStreamProcessor)
{
cameraStreamProcessor = new CameraStreamProcessor();
visionProcessorsList.add(cameraStreamProcessor);
// FtcDashboard.getInstance().startCameraStream(processor, 0);
}
if (RobotParams.Preferences.useAprilTagVision)
{
tracer.traceInfo(moduleName, "Starting AprilTagVision...");