Image processing framework using the Android camera2 API and OpenCV


I am new to the Camera2 API. I want to build an image processing framework on my Android phone:

Step 1: Open a camera preview stream using the Camera2 API

Step 2: Feed the preview frame data to OpenCV for processing

Step 3: Display the processing result on the screen in real time

So far I have completed Step 1 using an ImageReader, and I have C++ OpenCV code for the processing. However, I don't know how to do the rest: how do I display the processed image on the screen? (I want to show the normal image, and overlay an icon when a predefined object is detected.)
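One way I could imagine handling the display (just a sketch, assuming the processed result comes back as an RGBA Mat through the OpenCV Android SDK's Java bindings, and that textureView is only used for drawing rather than as a camera output target; iconBitmap is a placeholder for the overlay icon):

// Sketch only: draw a processed RGBA Mat onto the TextureView.
// Assumes org.opencv.android.Utils (OpenCV Android SDK) is available and that
// textureView is NOT registered as a camera output surface, since lockCanvas()
// cannot be used on a surface another producer is writing into.
import android.graphics.Bitmap;
import android.graphics.Canvas;
import org.opencv.android.Utils;
import org.opencv.core.Mat;

private void renderProcessedFrame(Mat rgba, boolean objectDetected) {
    Bitmap bitmap = Bitmap.createBitmap(rgba.cols(), rgba.rows(), Bitmap.Config.ARGB_8888);
    Utils.matToBitmap(rgba, bitmap);           // Mat -> Bitmap

    Canvas canvas = textureView.lockCanvas();  // draw directly on the TextureView
    if (canvas == null) return;
    try {
        canvas.drawBitmap(bitmap, 0, 0, null);
        if (objectDetected) {
            // iconBitmap: placeholder for the icon to overlay when the object is found
            canvas.drawBitmap(iconBitmap, 16, 16, null);
        }
    } finally {
        textureView.unlockCanvasAndPost(canvas);
    }
}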

Here is some of the key code:

protected void createCameraPreview() {
    try {
        SurfaceTexture texture = textureView.getSurfaceTexture();
        assert texture != null;
        texture.setDefaultBufferSize(imageDimension.getWidth(), imageDimension.getHeight());
        // Surface surface = new Surface(texture);

        // The only capture target for now is the ImageReader surface, so every
        // preview frame arrives in onImageAvailable() instead of the TextureView.
        Surface mImageSurface = mImageReader.getSurface();
        captureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
        // captureRequestBuilder.addTarget(surface);
        captureRequestBuilder.addTarget(mImageSurface);

        cameraDevice.createCaptureSession(Arrays.asList(mImageSurface), new CameraCaptureSession.StateCallback() {
            @Override
            public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
                // The camera is already closed
                if (null == cameraDevice) {
                    return;
                }
                cameraCaptureSessions = cameraCaptureSession;
                updatePreview();
            }

            @Override
            public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
                Toast.makeText(MainActivity.this, "Configuration failed", Toast.LENGTH_SHORT).show();
            }
        }, null);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
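For reference, mImageReader is created before this (the setup is not shown in the question); the code above assumes something along these lines, i.e. a preview-sized YUV_420_888 reader that delivers frames to the listener shown further down on the background thread:

// Sketch of the ImageReader setup that createCameraPreview() relies on.
// YUV_420_888 and maxImages = 2 are assumptions, not values from the question.
import android.graphics.ImageFormat;
import android.media.ImageReader;

private void setupImageReader() {
    mImageReader = ImageReader.newInstance(
            imageDimension.getWidth(), imageDimension.getHeight(),
            ImageFormat.YUV_420_888,
            2 /* maxImages: keep small, and always close() every acquired Image */);
    mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mBackgroundHandler);
}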

protected void updatePreview() {
    if (null == cameraDevice) {
        Log.e(TAG, "updatePreview error, return");
        return;  // the camera is already closed, nothing to update
    }
    try {
        cameraCaptureSessions.setRepeatingRequest(captureRequestBuilder.build(), null, mBackgroundHandler);
    } catch (CameraAccessException e) {
        e.printStackTrace();
    }
}
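mBackgroundHandler is not shown in the question either; it is presumably backed by a HandlerThread as in the standard camera2 samples, so camera callbacks and frame handling stay off the UI thread:

// Standard camera2 background-thread pattern backing mBackgroundHandler.
import android.os.Handler;
import android.os.HandlerThread;

private HandlerThread mBackgroundThread;

private void startBackgroundThread() {
    mBackgroundThread = new HandlerThread("CameraBackground");
    mBackgroundThread.start();
    mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}

private void stopBackgroundThread() {
    mBackgroundThread.quitSafely();
    try {
        mBackgroundThread.join();
        mBackgroundThread = null;
        mBackgroundHandler = null;
    } catch (InterruptedException e) {
        e.printStackTrace();
    }
}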

private final ImageReader.OnImageAvailableListener mOnImageAvailableListener = new ImageReader.OnImageAvailableListener() {

    @Override
    public void onImageAvailable(ImageReader reader) {
        Log.e(TAG, "onImageAvailable: " + count++);
        Image img = reader.acquireNextImage();
        try {
            if (img == null) throw new NullPointerException("cannot be null");
            // Copy the bytes of the first plane (the Y plane for a YUV_420_888 reader)
            ByteBuffer buffer = img.getPlanes()[0].getBuffer();
            byte[] data = new byte[buffer.remaining()];
            buffer.get(data);
            int width = img.getWidth();
            int height = img.getHeight();

            // ****try to get the captured image for display here (synchronous)

            // ****try to process the image here to detect the object (asynchronous)

        } catch (NullPointerException ex) {
            ex.printStackTrace();
        } finally {
            Log.e(TAG, "in the finally! ------------");
            if (img != null)
                img.close();  // always release the Image, or the reader will stall
        }
    }
};
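For the two placeholder comments above, a rough sketch of getting the frame into OpenCV on the Java side (assuming a YUV_420_888 reader; it wraps only the Y-plane bytes already copied above, so the result is a grayscale Mat, with rowStride taken from img.getPlanes()[0].getRowStride()):

// Sketch: wrap the copied Y-plane bytes as a grayscale OpenCV Mat.
import org.opencv.core.CvType;
import org.opencv.core.Mat;

private Mat yPlaneToGray(byte[] yData, int width, int height, int rowStride) {
    Mat gray = new Mat(height, width, CvType.CV_8UC1);
    if (rowStride == width) {
        gray.put(0, 0, yData);                  // rows are tightly packed
    } else {
        byte[] row = new byte[width];
        for (int r = 0; r < height; r++) {      // strip the per-row padding
            System.arraycopy(yData, r * rowStride, row, 0, width);
            gray.put(r, 0, row);
        }
    }
    return gray;
}

The resulting Mat could then be handed to the C++ detection code (or processed with the Java API) on a worker thread, and the detection result would drive the icon overlay drawn in the display sketch near the top of the question.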