如何在iPhone中将CVPixelBufferPool与AVAssetWriterInputPixelBufferAdaptor结合使用?

Atu*_*ain 15 iphone objective-c avfoundation avassetwriter

我已使用以下代码从图像成功创建了视频

-(void)writeImageAsMovie:(NSArray *)array toPath:(NSString*)path size:(CGSize)size duration:(int)duration 
{
    NSError *error = nil;
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:
                                  [NSURL fileURLWithPath:path] fileType:AVFileTypeQuickTimeMovie
                                                              error:&error];
    NSParameterAssert(videoWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height], AVVideoHeightKey,
                                   nil];
    AVAssetWriterInput* writerInput = [[AVAssetWriterInput
                                        assetWriterInputWithMediaType:AVMediaTypeVideo
                                        outputSettings:videoSettings] retain];

    AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                     assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                     sourcePixelBufferAttributes:nil];
    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);
    [videoWriter addInput:writerInput];


    //Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    CVPixelBufferRef buffer = NULL;
    buffer = [self pixelBufferFromCGImage:[[array objectAtIndex:0] CGImage]];
    [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];

    //Write samples:
    for (int i = 0;i<[array count]; i++)
    {
        if([writerInput isReadyForMoreMediaData])
        {
            NSLog(@"inside for loop %d",i);
            CMTime frameTime = CMTimeMake(1, 20);

            CMTime lastTime=CMTimeMake(i, 20); //i is from 0 to 24 of the loop above

            CMTime presentTime=CMTimeAdd(lastTime, frameTime);

            buffer = [self pixelBufferFromCGImage:[[array objectAtIndex:i] CGImage]];

            [adaptor appendPixelBuffer:buffer withPresentationTime:presentTime];

        }
        else
        {
            NSLog(@"error");
            i--;
        }
    }
    NSLog(@"outside for loop");

    //Finish the session:
    [writerInput markAsFinished];
    [videoWriter finishWriting];
}
Run Code Online (Sandbox Code Playgroud)

我在这里用过CVPixelBufferRef.而不是这个,我想CVPixelBufferPoolRef结合使用AVAssetWriterInputPixelBufferAdaptor.

任何人都可以提供我可以调试和使用的示例吗?

rad*_*cal 14

您正在传递nil'sourcePixelBufferAttributes',因此不会创建像素缓冲池:

AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput sourcePixelBufferAttributes:nil];
Run Code Online (Sandbox Code Playgroud)

而是传递一些属性,例如:

NSDictionary *bufferAttributes = [NSDictionary dictionaryWithObjectsAndKeys:
[NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];
Run Code Online (Sandbox Code Playgroud)

然后您可以使用池来创建像素缓冲区,例如:

CVPixelBufferPoolCreatePixelBuffer (NULL, adaptor.pixelBufferPool, &pixelBuffer);
Run Code Online (Sandbox Code Playgroud)

  • 使用缓冲池和适配器的性能是否明显(或可测量)更好? (3认同)

小智 6

@Atulkumar V. Jain:好极了!祝你好运^^ @Brian:你是对的,谢谢,我更正了,我现在正在努力工作这里是工作代码(如果别人需要它:-))

CVPixelBufferRef buffer = NULL;
buffer = [self pixelBufferFromCGImage:[[imagesArray objectAtIndex:0] CGImage]];
CVPixelBufferPoolCreatePixelBuffer (NULL, adaptor_.pixelBufferPool, &buffer);

[adaptor_ appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];

__block UInt64 convertedByteCount = 0;
dispatch_queue_t mediaInputQueue =  dispatch_queue_create("mediaInputQueue", NULL);
static int i = 1;
int frameNumber = [imagesArray count];

[writerInput requestMediaDataWhenReadyOnQueue:mediaInputQueue usingBlock:^{
    while (1){
        if (i == frameNumber) {
            break;
        }
        if ([writerInput isReadyForMoreMediaData]) {

            CVPixelBufferRef sampleBuffer = [self pixelBufferFromCGImage:[[imagesArray objectAtIndex:i] CGImage]];
            NSLog(@"inside for loop %d",i);
            CMTime frameTime = CMTimeMake(1, 20);

           CMTime lastTime=CMTimeMake(i, 20); //i is from 0 to 19 of the loop above

           CMTime presentTime=CMTimeAdd(lastTime, frameTime);       

        if (sampleBuffer) {
                [adaptor_ appendPixelBuffer:sampleBuffer withPresentationTime:presentTime];
                i++;
                CFRelease(sampleBuffer);
            } else {
                break;
            }
        }
    }
    NSLog (@"done");
    [writerInput markAsFinished];
    [videoWriter finishWriting];     

    CVPixelBufferPoolRelease(adaptor_.pixelBufferPool);
    [videoWriter release];
    [writerInput release];      
    [imagesArray removeAllObjects];


}];
Run Code Online (Sandbox Code Playgroud)