Get the color of a touched pixel from the camera preview

Meh*_*sar 5 camera android image-processing surfaceview

I have created a demo application that opens the camera. Now I want to get the color of the pixel the user touches on the live camera preview.

I have already tried overriding onTouchEvent. I can successfully get the x/y position of the touched pixel, but I cannot get its RGB color value from it: it always shows 0,0,0.

All suggestions are welcome, including any alternative way to achieve the same result. [Excluding OpenCV, because it would also require installing the OpenCvManager apk to support my application.]

Code:

    import android.content.Context;
    import android.graphics.ImageFormat;
    import android.hardware.Camera;
    import android.hardware.Camera.PreviewCallback;
    import android.util.AttributeSet;
    import android.util.Log;
    import android.view.MotionEvent;
    import android.view.SurfaceHolder;
    import android.view.SurfaceView;

    public class CameraPreview extends SurfaceView implements SurfaceHolder.Callback, PreviewCallback {

    private Camera camera;
    private SurfaceHolder holder;
    int[] myPixels;

    public CameraPreview(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
    }

    public CameraPreview(Context context, AttributeSet attrs) {
        super(context, attrs);
    }

    public CameraPreview(Context context) {
        super(context);
    }

    public void init(Camera camera) {
        this.camera = camera;
        initSurfaceHolder();
    }

    @SuppressWarnings("deprecation") // needed for < 3.0
    private void initSurfaceHolder() {
        holder = getHolder();
        holder.addCallback(this);
        holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        initCamera(holder);
    }

    private void initCamera(SurfaceHolder holder) {
        try {
            camera.setPreviewDisplay(holder);
            // setPreviewFormat() must be applied through setParameters(),
            // otherwise the change made on the copy returned by getParameters() is lost
            Camera.Parameters parameters = camera.getParameters();
            parameters.setPreviewFormat(ImageFormat.NV21);
            camera.setParameters(parameters);
            camera.setPreviewCallback(this);
            camera.startPreview();
        } catch (Exception e) {
            Log.d("CameraPreview", "Error setting camera preview", e);
        }
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
    }

    @Override
    public boolean onTouchEvent(MotionEvent event) {
        if(event.getAction() == MotionEvent.ACTION_DOWN)
        {
            android.util.Log.d("touched", "called");
/*          int x = (int)event.getX();
            int y = (int)event.getY();

            android.util.Log.d("touched pixel :", x+" "+y);

            setDrawingCacheEnabled(true);
            buildDrawingCache();
            Bitmap mBmp = getDrawingCache();

            int pixel = mBmp.getPixel(x, y);
            int redValue = Color.red(pixel);
            int blueValue = Color.blue(pixel);
            int greenValue = Color.green(pixel);        

            android.util.Log.d("touched pixel color :", redValue+" "+greenValue+" "+blueValue);
            android.util.Log.d("touched pixel color from preview:", redValue+" "+greenValue+" "+blueValue);
*/
            //how to get particular pixel from myPixels[]
        }
        return false;
    }

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {

        android.util.Log.d("onPreviewFrame", "called");

        int frameHeight = camera.getParameters().getPreviewSize().height;
        int frameWidth = camera.getParameters().getPreviewSize().width;

        // transform the NV21 preview frame into ARGB pixels
        int[] rgb = new int[frameWidth * frameHeight];
        myPixels = decodeYUV420SP(rgb, data, frameWidth, frameHeight);
    }



    // Standard NV21 (YUV420SP) to ARGB conversion using fixed-point arithmetic.
    public int[] decodeYUV420SP(int[] rgb, byte[] yuv420sp, int width, int height) {

        final int frameSize = width * height;

        for (int j = 0, yp = 0; j < height; j++) {
            int uvp = frameSize + (j >> 1) * width, u = 0, v = 0;
            for (int i = 0; i < width; i++, yp++) {
                int y = (0xff & ((int) yuv420sp[yp])) - 16;
                if (y < 0)
                    y = 0;
                // each U/V pair is shared by two horizontally adjacent pixels
                if ((i & 1) == 0) {
                    v = (0xff & yuv420sp[uvp++]) - 128;
                    u = (0xff & yuv420sp[uvp++]) - 128;
                }

                int y1192 = 1192 * y;
                int r = (y1192 + 1634 * v);
                int g = (y1192 - 833 * v - 400 * u);
                int b = (y1192 + 2066 * u);

                // clamp to the 18-bit fixed-point range before packing
                if (r < 0)
                    r = 0;
                else if (r > 262143)
                    r = 262143;
                if (g < 0)
                    g = 0;
                else if (g > 262143)
                    g = 262143;
                if (b < 0)
                    b = 0;
                else if (b > 262143)
                    b = 262143;

                // pack the clamped values into a single ARGB int
                rgb[yp] = 0xff000000 | ((r << 6) & 0xff0000)
                        | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
            }
        }

        return rgb;
    }


    }
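For reference, here is a minimal sketch of how the touched pixel could be looked up in myPixels once onPreviewFrame has decoded a frame. The previewWidth/previewHeight fields are assumptions (they would have to be saved in onPreviewFrame), and the touch coordinates are scaled because the SurfaceView and the preview frame usually have different sizes:

    // Sketch only: previewWidth and previewHeight are hypothetical fields,
    // e.g. assigned in onPreviewFrame() from camera.getParameters().getPreviewSize().
    private int previewWidth;
    private int previewHeight;

    @Override
    public boolean onTouchEvent(MotionEvent event) {
        if (event.getAction() == MotionEvent.ACTION_DOWN && myPixels != null) {
            // map the touch position from view coordinates to preview-frame coordinates
            int px = (int) (event.getX() * previewWidth / (float) getWidth());
            int py = (int) (event.getY() * previewHeight / (float) getHeight());

            // clamp to the frame bounds to avoid an ArrayIndexOutOfBoundsException
            px = Math.max(0, Math.min(px, previewWidth - 1));
            py = Math.max(0, Math.min(py, previewHeight - 1));

            // the decoded frame is stored row by row, so index = y * width + x
            int pixel = myPixels[py * previewWidth + px];
            int red = android.graphics.Color.red(pixel);
            int green = android.graphics.Color.green(pixel);
            int blue = android.graphics.Color.blue(pixel);
            android.util.Log.d("touched pixel color", red + " " + green + " " + blue);
        }
        return false;
    }

Note that this ignores any rotation between the camera sensor and the display; if setDisplayOrientation() is used, the x/y mapping has to be adjusted accordingly.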

Meh*_*sar 4

I solved it with a different approach. I will post the code as soon as I get some free time.

Algorithm:
  • Create an overlay on top of the live camera preview
  • When the user touches the screen, update the overlay with the RGB data decoded from the latest YUV buffer delivered by the live camera
  • Pick the RGB color from the overlay image (a rough sketch of this idea is shown below)
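The answer does not include code, but a rough sketch of the overlay idea could look like the following. The names overlayView (an android.widget.ImageView stacked above the preview), latestPixels, previewWidth and previewHeight are assumptions, not part of the original code, and imports are omitted for brevity:

    // Sketch only: overlayView, latestPixels, previewWidth and previewHeight are assumptions.
    private ImageView overlayView;            // ImageView stacked above the camera preview
    private int[] latestPixels;               // latest decoded frame (e.g. the myPixels array above)
    private int previewWidth, previewHeight;  // saved in onPreviewFrame()

    private void showTouchedColorFromOverlay(MotionEvent event) {
        // build a Bitmap from the most recently decoded preview frame
        Bitmap frame = Bitmap.createBitmap(latestPixels, previewWidth, previewHeight,
                Bitmap.Config.ARGB_8888);

        // show it in the overlay so the picked color matches what is on screen
        overlayView.setImageBitmap(frame);

        // map the touch position from overlay coordinates to bitmap coordinates
        int x = (int) (event.getX() * previewWidth / (float) overlayView.getWidth());
        int y = (int) (event.getY() * previewHeight / (float) overlayView.getHeight());
        x = Math.max(0, Math.min(x, previewWidth - 1));
        y = Math.max(0, Math.min(y, previewHeight - 1));

        // pick the RGB color directly from the overlay bitmap
        int pixel = frame.getPixel(x, y);
        android.util.Log.d("overlay color", Color.red(pixel) + " "
                + Color.green(pixel) + " " + Color.blue(pixel));
    }

Compared with reading the YUV buffer directly, the overlay guarantees that the color is sampled from exactly the image the user sees, at the cost of building a Bitmap per touch.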