I implemented the earlier suggestions (How to use CVPixelBufferPool and AVAssetWriterInputPixelBufferAdaptor in iPhone?) in Swift, but I keep getting "kCVReturnInvalidArgument" (error value: -6661) when calling CVPixelBufferPoolCreatePixelBuffer as described there.
I'm basically trying to create a movie from images, but because the buffer pool is never created successfully, I can't append any pixel buffers. Here is the code I'm using.
Any suggestions are much appreciated!
import Foundation
import Photos
import OpenGLES
import AVFoundation
import CoreMedia

class MovieGenerator {

    var _videoWriter: AVAssetWriter
    var _videoWriterInput: AVAssetWriterInput
    var _adapter: AVAssetWriterInputPixelBufferAdaptor
    var _buffer = UnsafeMutablePointer<Unmanaged<CVPixelBuffer>?>.alloc(1)

    init(frameSize size: CGSize, outputURL url: NSURL) {
        // delete file if exists
        let sharedManager = NSFileManager.defaultManager() as NSFileManager
        if(sharedManager.fileExistsAtPath(url.path!)) {
            sharedManager.removeItemAtPath(url.path, error: nil)
        }

        // video writer
        _videoWriter = AVAssetWriter(URL: url, fileType: AVFileTypeQuickTimeMovie, error: nil)

        // writer input
        var videoSettings = [AVVideoCodecKey: AVVideoCodecH264, AVVideoWidthKey: size.width, AVVideoHeightKey: size.height]
        _videoWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
        _videoWriterInput.expectsMediaDataInRealTime = true
        _videoWriter.addInput(_videoWriterInput)

        // pixel buffer adapter
        var adapterAttributes = [kCVPixelBufferPixelFormatTypeKey: kCVPixelFormatType_32BGRA,
                                 kCVPixelBufferWidthKey: size.width,
                                 kCVPixelBufferHeightKey: size.height,
                                 kCVPixelFormatOpenGLESCompatibility: kCFBooleanTrue]
        _adapter = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: _videoWriterInput, sourcePixelBufferAttributes: adapterAttributes)

        var poolCreateResult: CVReturn = CVPixelBufferPoolCreatePixelBuffer(nil, _adapter.pixelBufferPool, _buffer)
        println("pool creation:\(poolCreateResult)")

        _videoWriter.startWriting()
        _videoWriter.startSessionAtSourceTime(kCMTimeZero)
    }

    func addImage(image: UIImage, frameNum: Int, fps: Int) -> Bool {
        self.createPixelBufferFromCGImage(image.CGImage, pixelBufferPtr: _buffer)

        var presentTime: CMTime = CMTimeMake(Int64(frameNum), Int32(fps))
        var result: Bool = _adapter.appendPixelBuffer(_buffer.memory?.takeUnretainedValue(), withPresentationTime: presentTime)
        return result
    }

    func finalizeMovie(timeStamp: CMTime) {
        _videoWriterInput.markAsFinished()
        _videoWriter.endSessionAtSourceTime(timeStamp)
        _videoWriter.finishWritingWithCompletionHandler({ println("video writer finished with status: \(self._videoWriter.status)") })
    }

    func createPixelBufferFromCGImage(image: CGImage, pixelBufferPtr: UnsafeMutablePointer<Unmanaged<CVPixelBuffer>?>) {
        let width: UInt = CGImageGetWidth(image)
        let height: UInt = CGImageGetHeight(image)

        let imageData: CFData = CGDataProviderCopyData(CGImageGetDataProvider(image))
        let options: CFDictionary = [kCVPixelBufferCGImageCompatibilityKey: NSNumber.numberWithBool(true),
                                     kCVPixelBufferCGBitmapContextCompatibilityKey: NSNumber.numberWithBool(true)]

        var status: CVReturn = CVPixelBufferCreate(kCFAllocatorDefault, width, height, OSType(kCVPixelFormatType_32BGRA), options, pixelBufferPtr)
        assert(status != 0, "CVPixelBufferCreate: \(status)")

        var lockStatus: CVReturn = CVPixelBufferLockBaseAddress(pixelBufferPtr.memory?.takeUnretainedValue(), 0)
        println("CVPixelBufferLockBaseAddress: \(lockStatus)")

        var pxData: UnsafeMutablePointer<(Void)> = CVPixelBufferGetBaseAddress(pixelBufferPtr.memory?.takeUnretainedValue())

        let bitmapinfo = CGBitmapInfo.fromRaw(CGImageAlphaInfo.NoneSkipFirst.toRaw())
        let rgbColorSpace: CGColorSpace = CGColorSpaceCreateDeviceRGB()

        var context: CGContextRef = CGBitmapContextCreate(pxData, width, height, 8, 4 * CGImageGetWidth(image), rgbColorSpace, bitmapinfo!)
        CGContextDrawImage(context, CGRectMake(0, 0, CGFloat(width), CGFloat(height)), image)

        CVPixelBufferUnlockBaseAddress(pixelBufferPtr.memory?.takeUnretainedValue(), 0)
    }
}
Frustratingly, I can't answer your question exactly, but I'm working on essentially the same code. My case happens to get further than the error you're seeing: it makes it all the way to trying to add the images to the movie, and then fails because it never gets a successful result from appendPixelBuffer() — and I don't know how to find out why. I'm posting this in the hope that it helps you get further along.
(My code is adapted from AVFoundation + AssetWriter: Generate Movie With Images and Audio, and I used your post to help navigate the pointer-interop shenanigans...)
func writeAnimationToMovie(path: String, size: CGSize, animation: Animation) -> Bool {
    var error: NSError?
    let writer = AVAssetWriter(URL: NSURL(fileURLWithPath: path), fileType: AVFileTypeQuickTimeMovie, error: &error)

    let videoSettings = [AVVideoCodecKey: AVVideoCodecH264, AVVideoWidthKey: size.width, AVVideoHeightKey: size.height]

    let input = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
    let pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: input, sourcePixelBufferAttributes: nil)

    input.expectsMediaDataInRealTime = true
    writer.addInput(input)

    writer.startWriting()
    writer.startSessionAtSourceTime(kCMTimeZero)

    var buffer: CVPixelBufferRef

    var frameCount = 0
    for frame in animation.frames {
        let rect = CGRectMake(0, 0, size.width, size.height)
        let rectPtr = UnsafeMutablePointer<CGRect>.alloc(1)
        rectPtr.memory = rect
        buffer = pixelBufferFromCGImage(frame.image.CGImageForProposedRect(rectPtr, context: nil, hints: nil).takeUnretainedValue(), size)

        var appendOk = false
        var j = 0
        while (!appendOk && j < 30) {
            if pixelBufferAdaptor.assetWriterInput.readyForMoreMediaData {
                let frameTime = CMTimeMake(Int64(frameCount), 10)
                appendOk = pixelBufferAdaptor.appendPixelBuffer(buffer, withPresentationTime: frameTime)
                // appendOk will always be false
                NSThread.sleepForTimeInterval(0.05)
            } else {
                NSThread.sleepForTimeInterval(0.1)
            }
            j++
        }
        if (!appendOk) {
            println("Doh, frame \(frame) at offset \(frameCount) failed to append")
        }
    }

    input.markAsFinished()
    writer.finishWritingWithCompletionHandler({
        if writer.status == AVAssetWriterStatus.Failed {
            println("oh noes, an error: \(writer.error.description)")
        } else {
            println("hrmmm, there should be a movie?")
        }
    })

    return true;
}
where pixelBufferFromCGImage is defined like this:
func pixelBufferFromCGImage(image: CGImageRef, size: CGSize) -> CVPixelBufferRef {
    let options = [
        kCVPixelBufferCGImageCompatibilityKey: true,
        kCVPixelBufferCGBitmapContextCompatibilityKey: true]

    var pixBufferPointer = UnsafeMutablePointer<Unmanaged<CVPixelBuffer>?>.alloc(1)

    let status = CVPixelBufferCreate(
        nil,
        UInt(size.width), UInt(size.height),
        OSType(kCVPixelFormatType_32ARGB),
        options,
        pixBufferPointer)

    CVPixelBufferLockBaseAddress(pixBufferPointer.memory?.takeUnretainedValue(), 0)

    let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
    let bitmapinfo = CGBitmapInfo.fromRaw(CGImageAlphaInfo.NoneSkipFirst.toRaw())

    var pixBufferData: UnsafeMutablePointer<(Void)> = CVPixelBufferGetBaseAddress(pixBufferPointer.memory?.takeUnretainedValue())

    let context = CGBitmapContextCreate(
        pixBufferData,
        UInt(size.width), UInt(size.height),
        8, UInt(4 * size.width),
        rgbColorSpace, bitmapinfo!)

    CGContextConcatCTM(context, CGAffineTransformMakeRotation(0))
    CGContextDrawImage(
        context,
        CGRectMake(0, 0, CGFloat(CGImageGetWidth(image)), CGFloat(CGImageGetHeight(image))),
        image)

    CVPixelBufferUnlockBaseAddress(pixBufferPointer.memory?.takeUnretainedValue(), 0)

    return pixBufferPointer.memory!.takeUnretainedValue()
}
According to the documentation for pixelBufferPool:

"This property is NULL before the first call to startSessionAtTime: on the associated AVAssetWriter object."

Moving the call to CVPixelBufferPoolCreatePixelBuffer to the end of init should fix the immediate problem.
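As a minimal sketch of that reordering, reusing the names from the question's MovieGenerator and assuming everything else in init stays the same:

// At the end of init: start the writer session first, so the adaptor's
// pixelBufferPool is no longer NULL when we ask it for a buffer.
_videoWriter.startWriting()
_videoWriter.startSessionAtSourceTime(kCMTimeZero)

// Only now pull a pixel buffer from the adaptor's pool.
let poolCreateResult: CVReturn = CVPixelBufferPoolCreatePixelBuffer(nil, _adapter.pixelBufferPool, _buffer)
println("pool creation: \(poolCreateResult)")  // expect 0 (kCVReturnSuccess)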
A few other observations:

- The AVAssetWriterInputPixelBufferAdaptor is configured for BGRA, but createPixelBufferFromCGImage uses RGB. Your final video will look strange if the pixel formats don't match.
- You're calling CVPixelBufferCreate in your createPixelBufferFromCGImage method. That defeats the purpose of using a buffer pool; buffers should come from the adaptor's pool instead (see the sketch after this list).
- Using an autoreleasepool and being careful about takeUnretainedValue vs takeRetainedValue will help.

I've posted reference implementations for Swift 1.2, 2.0, and 3.0 that use the buffer pool.
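To illustrate the last two points, here is a hedged sketch, in the same Swift 1.x style as the question, of addImage drawing into a buffer vended by the adaptor's pool rather than one from CVPixelBufferCreate. fillPixelBuffer is a hypothetical helper standing in for the lock/draw/unlock code in createPixelBufferFromCGImage, done with a BGRA bitmap context instead of the RGB one above:

func addImage(image: UIImage, frameNum: Int, fps: Int) -> Bool {
    var appended = false
    autoreleasepool {
        // Ask the adaptor's pool for a buffer instead of creating a new one per frame.
        let bufferPtr = UnsafeMutablePointer<Unmanaged<CVPixelBuffer>?>.alloc(1)
        let status = CVPixelBufferPoolCreatePixelBuffer(nil, self._adapter.pixelBufferPool, bufferPtr)
        if status == 0 {  // kCVReturnSuccess
            // Create rule: take a retained (+1) reference so the buffer is
            // released, and recycled by the pool, once we're done with it.
            if let buffer = bufferPtr.memory?.takeRetainedValue() {
                // Hypothetical helper: lock the buffer, draw image.CGImage into a
                // BGRA bitmap context over its base address, then unlock.
                self.fillPixelBuffer(buffer, image: image.CGImage)
                appended = self._adapter.appendPixelBuffer(buffer,
                    withPresentationTime: CMTimeMake(Int64(frameNum), Int32(fps)))
            }
        }
        bufferPtr.dealloc(1)
    }
    return appended
}

The autoreleasepool keeps the per-frame Core Graphics and Core Video temporaries from piling up while the loop driving addImage is running.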