How to display a raw YUV frame in a Cocoa OpenGL program

Rea*_*ion 1 opengl macos rgb cocoa yuv

I've been assigned the task of writing a program that takes a sample raw YUV file and displays it in a Cocoa OpenGL program.

I'm an intern and have little to no idea where to start. I've been reading the Wikipedia entry and other articles on YUV, but I can't find any good source code that shows how to open a raw YUV file, extract the data, convert it to RGB, and display it in a view window.

Essentially, I need help with the following aspects of the task:

- how to extract the YUV data from the sample YUV file
- how to convert the YUV data to the RGB color space
- how to display the RGB color space in OpenGL (this one I think I can figure out with time, but I really need help with the first two points)

Please tell me which classes to use, or point me to places where I can learn about YUV graphics/video display.

Bra*_*son 8

I've done this with YUV frames captured from a CCD camera. Unfortunately, there are a number of different YUV formats. I believe the one Apple uses for its GL_YCBCR_422_APPLE texture format is technically 2VUY422. To convert an image from the YUV422 frames produced by an IIDC FireWire camera into 2VUY422, I used the following:

void yuv422_2vuy422(const unsigned char *theYUVFrame, unsigned char *the422Frame, const unsigned int width, const unsigned int height) 
{
    unsigned int i = 0, j = 0;
    unsigned int numPixels = width * height;
    unsigned int totalNumberOfPasses = numPixels * 2;
    // Signed temporaries: u and v go negative after the -128 bias below
    int y0, y1, y2, y3, u0, u2, v0, v2;

    while (i < (totalNumberOfPasses) )
    {
        u0 = theYUVFrame[i++]-128;
        y0 = theYUVFrame[i++];
        v0 = theYUVFrame[i++]-128;
        y1 = theYUVFrame[i++];
        u2 = theYUVFrame[i++]-128;
        y2 = theYUVFrame[i++];
        v2 = theYUVFrame[i++]-128;
        y3 = theYUVFrame[i++];

        // U0 Y0 V0 Y1 U2 Y2 V2 Y3

        // Remap the values to 2VUY (YUYS?) (Y422) colorspace for OpenGL
        // Y0 U Y1 V Y2 U Y3 V

        // IIDC cameras are full-range y=[0..255], u,v=[-127..+127], where display is "video range" (y=[16..240], u,v=[16..236])

        the422Frame[j++] = ((y0 * 240) / 255 + 16);
        the422Frame[j++] = ((u0 * 236) / 255 + 128);
        the422Frame[j++] = ((y1 * 240) / 255 + 16);
        the422Frame[j++] = ((v0 * 236) / 255 + 128);
        the422Frame[j++] = ((y2 * 240) / 255 + 16);
        the422Frame[j++] = ((u2 * 236) / 255 + 128);
        the422Frame[j++] = ((y3 * 240) / 255 + 16);
        the422Frame[j++] = ((v2 * 236) / 255 + 128);
    }
}

To display a YUV video source efficiently, you may wish to use Apple's client storage extension, which you can set up with something like the following:

glEnable(GL_TEXTURE_RECTANGLE_EXT);
glBindTexture(GL_TEXTURE_RECTANGLE_EXT, 1);

// Point the texture at the client-side buffer and hint shared (client/GPU)
// storage so uploads can avoid an extra copy
glTextureRangeAPPLE(GL_TEXTURE_RECTANGLE_EXT, videoImageWidth * videoImageHeight * 2, videoTexture);
glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_STORAGE_HINT_APPLE, GL_STORAGE_SHARED_APPLE);
glPixelStorei(GL_UNPACK_CLIENT_STORAGE_APPLE, GL_TRUE);

// Nearest filtering and edge clamping; rectangle textures don't use mipmaps
glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_RECTANGLE_EXT, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glPixelStorei(GL_UNPACK_ROW_LENGTH, 0);

// Allocate the texture once, taking the packed 4:2:2 data directly as GL_YCBCR_422_APPLE
glTexImage2D(GL_TEXTURE_RECTANGLE_EXT, 0, GL_RGBA, videoImageWidth, videoImageHeight, 0, GL_YCBCR_422_APPLE, GL_UNSIGNED_SHORT_8_8_REV_APPLE, videoTexture);

This lets you quickly change the data stored in your client-side video texture before each frame is displayed on screen.
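
Since the original question is about a sample raw YUV file rather than a camera, here is a minimal sketch (not part of the original answer) of how each frame could be pulled from such a file into that client-side buffer. It assumes the file is headerless, packed 4:2:2 in UYVY order at 2 bytes per pixel; readNextYUVFrame and the buffer names are hypothetical:

#include <stdio.h>

// Hypothetical helper: read the next packed UYVY frame from a raw .yuv file
// and repack it into the 2VUY buffer backing the OpenGL texture.
// Returns 1 on success, 0 at end of file or on a short read.
int readNextYUVFrame(FILE *yuvFile, unsigned int width, unsigned int height,
                     unsigned char *scratchBuffer,  /* width * height * 2 bytes */
                     unsigned char *videoTexture)   /* width * height * 2 bytes */
{
    size_t frameBytes = (size_t)width * height * 2;
    if (fread(scratchBuffer, 1, frameBytes, yuvFile) != frameBytes)
        return 0;
    yuv422_2vuy422(scratchBuffer, videoTexture, width, height);
    return 1;
}

After each successful read, calling glTexSubImage2D (as in the drawing code below) pushes the new frame into the texture.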

To draw, you can use code like the following:

glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glEnable(GL_TEXTURE_2D);

glViewport(0, 0, [self frame].size.width, [self frame].size.height);

// Pixel-aligned orthographic projection matching the view's bounds
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
NSRect bounds = NSRectFromCGRect([self bounds]);
glOrtho((GLfloat)NSMinX(bounds), (GLfloat)NSMaxX(bounds), (GLfloat)NSMinY(bounds), (GLfloat)NSMaxY(bounds), -1.0, 1.0);

// Upload the latest frame from the client-side buffer
glBindTexture(GL_TEXTURE_RECTANGLE_EXT, 1);
glTexSubImage2D(GL_TEXTURE_RECTANGLE_EXT, 0, 0, 0, videoImageWidth, videoImageHeight, GL_YCBCR_422_APPLE, GL_UNSIGNED_SHORT_8_8_REV_APPLE, videoTexture);

glMatrixMode(GL_TEXTURE);
glLoadIdentity();

// Rectangle textures use non-normalized (pixel) texture coordinates;
// the quad flips the image vertically to match the top-down video layout
glBegin(GL_QUADS);
    glTexCoord2f(0.0f, 0.0f);
    glVertex2f(0.0f, videoImageHeight);

    glTexCoord2f(0.0f, videoImageHeight);
    glVertex2f(0.0f, 0.0f);

    glTexCoord2f(videoImageWidth, videoImageHeight);
    glVertex2f(videoImageWidth, 0.0f);

    glTexCoord2f(videoImageWidth, 0.0f);
    glVertex2f(videoImageWidth, videoImageHeight);
glEnd();


Jen*_*ton 6

Adam Rosenfield's comment is incorrect. On the Mac, you can display YCbCr (the digital equivalent of YUV) textures using the GL_YCBCR_422_APPLE texture format, as specified in the APPLE_ycbcr_422 extension.
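
For reference, the upload call implied by that extension mirrors the one in the answer above (a sketch only; width, height, and pixelData are placeholders):

glTexImage2D(GL_TEXTURE_RECTANGLE_EXT, 0, GL_RGBA,
             width, height, 0,
             GL_YCBCR_422_APPLE, GL_UNSIGNED_SHORT_8_8_REV_APPLE, pixelData);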