Fixing the green YUYV frame format when streaming with OpenGlView

java android opencv image-processing kotlin

I am using a library to stream RTMP from a USB/UVC camera on Android. The library I am using is this one. It works fine with RGB Android USB cameras. However, I am using a thermal camera with a YUYV frame format, so the initial camera image looks like this:

[image: the raw camera preview, which appears green]

The image is in YUV format and needs some processing before it is viewable. So I would like to know how to grab the frames from the camera, apply the processing, and then update the preview.

My problem is that when streaming from my device, the endpoint/device receiving the stream also sees this green image, since the frames still need processing, and I don't know how to correct this on Android.

When the camera is connected on Windows, I can process the image and make it viewable with the following steps:

import cv2
import numpy as np

cap = cv2.VideoCapture(-1)
cap.set(cv2.CAP_PROP_CONVERT_RGB, 0)

while True:
    ret, frame = cap.read()
    frame = frame.reshape(292, 384, 2)
    # Remove the last 4 extra rows
    frame = frame[0:288, ...]
    # View each pixel's two bytes as one little-endian uint16, then convert to float32
    dt = np.dtype(('<u2', [('x', np.uint8, 2)]))
    frame = frame.view(dtype=dt).astype(np.float32)

    # Scale to 0..255 so the frame can be displayed
    gray = cv2.normalize(frame, None, 0, 255, cv2.NORM_MINMAX).astype(np.uint8)
    cv2.imshow('frame', gray)
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break

This outputs a 16-bit grayscale frame.
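As far as I can tell, all that numpy view does is combine each pixel's two bytes into one little-endian 16-bit value, which in Java terms would be something like this (just to illustrate what I think the conversion is, not code I have running anywhere):

// What the numpy '<u2' view does for a single pixel: combine its two
// bytes into one little-endian unsigned 16-bit value.
static int pixelValue(byte low, byte high) {
    return (low & 0xFF) | ((high & 0xFF) << 8);
}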

Unfortunately, I don't have much experience with image processing on Android, so I'm not sure whether I have to repeat the same steps I do in Python, or whether there is an alternative/simpler way on Android to fix the frames that are streamed through the OpenGlView. The streaming library uses an OpenGlView to display the frames:

<com.pedro.rtplibrary.view.OpenGlView
    android:id="@+id/openglview"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    app:keepAspectRatio="true"
    />

This is the streaming service that handles camera detection:

import android.app.Notification
import android.app.NotificationChannel
import android.app.NotificationManager
import android.app.Service
import android.content.Context
import android.content.Intent
import android.hardware.usb.UsbDevice
import android.os.Binder
import android.os.Build
import android.os.IBinder
import android.util.Log
import androidx.annotation.RequiresApi
import androidx.core.app.NotificationCompat
import com.pedro.rtplibrary.view.OpenGlView
import com.serenegiant.usb.USBMonitor
import com.serenegiant.usb.UVCCamera
import net.ossrs.rtmp.ConnectCheckerRtmp

/**
 * Basic RTMP/RTSP service streaming implementation with a USB/UVC camera
 */
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
class StreamService : Service() {
    companion object {
        private const val TAG = "RtpService"
        private const val channelId = "rtpStreamChannel"
        private const val notifyId = 123456
        private const val width = 384
        private const val height = 292

        var openGlView: OpenGlView? = null
    }

    val isStreaming: Boolean get() = endpoint != null

    private var endpoint: String? = null
    private var rtmpUSB: RtmpUSB? = null
    private var uvcCamera: UVCCamera? = null
    private var usbMonitor: USBMonitor? = null
    private val notificationManager: NotificationManager by lazy { getSystemService(Context.NOTIFICATION_SERVICE) as NotificationManager }

    override fun onCreate() {
        super.onCreate()
        Log.e(TAG, "RTP service create")
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
            val channel = NotificationChannel(channelId, channelId, NotificationManager.IMPORTANCE_HIGH)
            notificationManager.createNotificationChannel(channel)
        }
        keepAliveTrick()
    }

    private fun keepAliveTrick() {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.O) {
            val notification = NotificationCompat.Builder(this, channelId)
                .setOngoing(true)
                .setContentTitle("")
                .setContentText("").build()
            startForeground(1, notification)
        } else {
            startForeground(1, Notification())
        }
    }

    override fun onStartCommand(intent: Intent?, flags: Int, startId: Int): Int {
        Log.e(TAG, "RTP service started")
        usbMonitor = USBMonitor(this, onDeviceConnectListener).apply {
            register()
        }
        return START_STICKY
    }

    private fun prepareStreamRtp() {
        stopStream()
        stopPreview()

        rtmpUSB = if (openGlView == null) {
            RtmpUSB(this, connectCheckerRtmp)
        } else {
            RtmpUSB(openGlView, connectCheckerRtmp)
        }
    }

    fun startStreamRtp(endpoint: String): Boolean {
        if (rtmpUSB?.isStreaming == false) {
            this.endpoint = endpoint
            if (rtmpUSB!!.prepareVideo(width, height, 30, 4000 * 1024, false, 0, uvcCamera) && rtmpUSB!!.prepareAudio()) {
                rtmpUSB!!.startStream(uvcCamera, endpoint)
                return true
            }
        }
        return false
    }

    fun setView(view: OpenGlView) {
        openGlView = view
        rtmpUSB?.replaceView(openGlView, uvcCamera)
    }

    fun setView(context: Context) {
        openGlView = null
        rtmpUSB?.replaceView(context, uvcCamera)
    }

    fun startPreview() {
        rtmpUSB?.startPreview(uvcCamera, width, height)
    }

    private val connectCheckerRtmp = object : ConnectCheckerRtmp {
        override fun onConnectionSuccessRtmp() {
            showNotification("Stream started")
            Log.e(TAG, "RTP connection success")
        }

        // showNotification helper and remaining ConnectCheckerRtmp callbacks omitted for brevity
    }

    private val onDeviceConnectListener = object : USBMonitor.OnDeviceConnectListener {
        override fun onAttach(device: UsbDevice?) {
            usbMonitor!!.requestPermission(device)
        }

        override fun onConnect(device: UsbDevice?, ctrlBlock: USBMonitor.UsbControlBlock?, createNew: Boolean) {
            val camera = UVCCamera()
            camera.open(ctrlBlock)
            try {
                camera.setPreviewSize(width, height, UVCCamera.FRAME_FORMAT_YUYV)
            } catch (e: IllegalArgumentException) {
                camera.destroy()
                try {
                    camera.setPreviewSize(width, height, UVCCamera.DEFAULT_PREVIEW_MODE)
                } catch (e1: IllegalArgumentException) {
                    return
                }
            }
            uvcCamera = camera
            prepareStreamRtp()
            rtmpUSB!!.startPreview(uvcCamera, width, height)
            endpoint?.let { startStreamRtp(it) }
        }
        // remaining USBMonitor.OnDeviceConnectListener callbacks omitted for brevity
    }
}

The MainActivity that uses the stream service to start streaming:

import android.Manifest.permission.CAMERA
import android.Manifest.permission.READ_EXTERNAL_STORAGE
import android.Manifest.permission.RECORD_AUDIO
import android.Manifest.permission.WRITE_EXTERNAL_STORAGE
import android.content.ComponentName
import android.content.Context
import android.content.Intent
import android.content.ServiceConnection
import android.os.Bundle
import android.os.IBinder
import android.util.Log
import android.view.SurfaceHolder
import android.view.View
import android.widget.Button
import androidx.activity.viewModels
import androidx.appcompat.app.AppCompatActivity
import androidx.core.app.ActivityCompat.requestPermissions
import com.pedro.rtplibrary.view.OpenGlView
import dagger.hilt.android.AndroidEntryPoint
import dev.alejandrorosas.apptemplate.MainViewModel.ViewState
import dev.alejandrorosas.streamlib.StreamService
import org.opencv.android.OpenCVLoader

@AndroidEntryPoint
class MainActivity : AppCompatActivity(R.layout.activity_main), SurfaceHolder.Callback, ServiceConnection {

    private val viewModel by viewModels<MainViewModel>()
    private var mService: StreamService? = null

    override fun onCreate(savedInstanceState: Bundle?) {
        Log.d("OPENCV", "OPENCV Loading Status ${OpenCVLoader.initDebug()}")

        super.onCreate(savedInstanceState)
        StreamService.openGlView = findViewById(R.id.openglview)
        startService(getServiceIntent())

        viewModel.serviceLiveEvent.observe(this) { mService?.let(it) }
        viewModel.getViewState().observe(this) { render(it) }

        findViewById<View>(R.id.settings_button).setOnClickListener { startActivity(Intent(this, SettingsActivity::class.java)) }
        findViewById<OpenGlView>(R.id.openglview).holder.addCallback(this)
        findViewById<Button>(R.id.start_stop_stream).setOnClickListener { viewModel.onStreamControlButtonClick() }

        requestPermissions(this, arrayOf(READ_EXTERNAL_STORAGE, RECORD_AUDIO, CAMERA, WRITE_EXTERNAL_STORAGE), 1)
    }

    private fun render(viewState: ViewState) {
        findViewById<Button>(R.id.start_stop_stream).setText(viewState.streamButtonText)
    }

    private fun getServiceIntent(): Intent {
        return Intent(this, StreamService::class.java).also {
            bindService(it, this, Context.BIND_AUTO_CREATE)
        }
    }


    override fun surfaceChanged(holder: SurfaceHolder, p1: Int, p2: Int, p3: Int) {
        mService?.let {
            it.setView(findViewById<OpenGlView>(R.id.openglview))
            it.startPreview()
        }
    }


    override fun surfaceCreated(holder: SurfaceHolder) {
    }

    // surfaceDestroyed, onServiceConnected and onServiceDisconnected omitted for brevity
}

In USBBase.java I implemented the call that grabs the frames from the USB camera, as follows:

 public void startPreview(final UVCCamera uvcCamera, int width, int height) {
        Log.e(TAG, "handleStartPreview:mUVCCamera" + uvcCamera + " mIsPreviewing:");
        if (uvcCamera == null) return;
        Log.e(TAG, "handleStartPreview2 ");
        if (!isStreaming() && !onPreview && !(glInterface instanceof OffScreenGlThread)) {
            uvcCamera.setFrameCallback(mIFrameCallback, UVCCamera.PIXEL_FORMAT_RGBX);
            //uvcCamera.setValue(UVCCamera.CTRL_ZOOM_ABS, 0x8800);
            glInterface.setEncoderSize(width, height);
            glInterface.setRotation(0);
            glInterface.start();
            uvcCamera.setPreviewTexture(glInterface.getSurfaceTexture());
            uvcCamera.startPreview();
            onPreview = true;
        } else {
            Log.e(TAG, "Streaming or preview started, ignored");
        }
    }

    // grabs the frame from the camera
    private byte[] FrameData = new byte[384 * 292 * 4];
    private final IFrameCallback mIFrameCallback = new IFrameCallback() {
        @Override
        public void onFrame(final ByteBuffer frameData) {
            Log.d(TAG, "mIFrameCallback: onFrame------");
            frameData.get(FrameData, 0, frameData.capacity());
        }
    };
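If I do have to replicate the Python steps myself, my best guess is something along these lines next to the frame callback in USBBase.java, using OpenCV's Java bindings (which I already load in MainActivity). This is only a rough sketch: processThermalFrame is a name I made up, and I'm assuming I can get the raw two-bytes-per-pixel buffer from the callback (above it is currently set to PIXEL_FORMAT_RGBX) and that the byte order is little-endian as it is on Windows:

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;

// Rough equivalent of the Python steps: view the 384x292 two-byte-per-pixel
// frame as 16-bit values, drop the last 4 rows, and scale down to 8 bits.
private Mat processThermalFrame(byte[] frameData) {
    // Reinterpret each pair of bytes as one little-endian 16-bit value
    short[] pixels = new short[384 * 292];
    ByteBuffer.wrap(frameData)
            .order(ByteOrder.LITTLE_ENDIAN)
            .asShortBuffer()
            .get(pixels);

    Mat gray16 = new Mat(292, 384, CvType.CV_16UC1);
    gray16.put(0, 0, pixels);

    // Remove the 4 extra rows, as in the Python code
    Mat cropped = gray16.submat(0, 288, 0, 384);

    // Scale the 16-bit range down to 0..255 so the frame is viewable
    Mat gray8 = new Mat();
    Core.normalize(cropped, gray8, 0, 255, Core.NORM_MINMAX, CvType.CV_8UC1);
    return gray8;
}

From there I assume Utils.matToBitmap would give me something displayable, but I still don't see how to feed the processed frame back into the OpenGlView and into the outgoing stream, which is really what I'm asking.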

However, I don't understand whether this is where I have to do the processing so that a viewable image is streamed from the OpenGlView, or how to correctly process the YUYV frames and then update my view.

In short, I would like to know how to fix the green image captured by the camera.