Tags: objective-c, avfoundation, ios, swift
I have a Swift class that takes an image and creates a 10-second video of that image.
I create the video using AVAssetWriter, and it outputs a ten-second video. This all works as expected.
func setup() {
    //Setup video size
    self.videoSize = self.getVideoSizeForImage(self.videoImage!)
    //Setup temp video path
    self.tempVideoPath = self.getTempVideoPath()
    //Setup video writer
    let videoWriter = AVAssetWriter(
        URL: NSURL(fileURLWithPath: self.tempVideoPath!),
        fileType: AVFileTypeMPEG4,
        error: nil)
    //Setup video writer input
    let videoSettings = [
        AVVideoCodecKey: AVVideoCodecH264,
        AVVideoWidthKey: self.videoSize!.width,
        AVVideoHeightKey: self.videoSize!.height
    ]
    let videoWriterInput = AVAssetWriterInput(
        mediaType: AVMediaTypeVideo,
        outputSettings: videoSettings as [NSObject : AnyObject]
    )
    videoWriterInput.expectsMediaDataInRealTime = true
    //Setup video writer adaptor
    let adaptor = AVAssetWriterInputPixelBufferAdaptor(
        assetWriterInput: videoWriterInput,
        sourcePixelBufferAttributes: [
            kCVPixelBufferPixelFormatTypeKey: kCVPixelFormatType_32ARGB
        ]
    )
    videoWriter.addInput(videoWriterInput)
    //Setup frame time
    self.currentFrameTime = CMTimeMake(0, 10000)
    //Start video writing session
    videoWriter.startWriting()
    videoWriter.startSessionAtSourceTime(self.currentFrameTime!)
    self.currentVideoWriter = videoWriter
    self.currentVideoWriterAdaptor = adaptor
}
func addImageToVideoWriter(image: UIImage, duration: CGFloat) {
    //Get image pixel buffer
    let buffer = self.pixelBufferFromImage(image)
    var frameTime = self.currentFrameTime!
    //Add pixel buffer to video
    let adaptor = self.currentVideoWriterAdaptor!
    //Busy-wait until the writer input can accept another buffer
    while !adaptor.assetWriterInput.readyForMoreMediaData {}
    if adaptor.assetWriterInput.readyForMoreMediaData {
        adaptor.appendPixelBuffer(
            buffer,
            withPresentationTime: frameTime
        )
        //Advance the presentation time by `duration` seconds,
        //e.g. with timescale 10000 and duration 10.0 the value becomes 100000 (= 10 s)
        let seconds = CGFloat(CMTimeGetSeconds(frameTime)) + duration
        let timescale = frameTime.timescale
        let value = CGFloat(timescale) * seconds
        frameTime.value = Int64(value)
        frameTime.timescale = Int32(timescale)
        self.currentFrameTime = frameTime
        self.lastImage = image
    }
}
func pixelBufferFromImage(image: UIImage) -> CVPixelBufferRef {
    let size = image.size
    var pixelBuffer: Unmanaged<CVPixelBuffer>?
    //Use the CoreVideo key constants; string literals of their names are not the real keys
    let bufferOptions: CFDictionary = [
        kCVPixelBufferCGBitmapContextCompatibilityKey: NSNumber(bool: true),
        kCVPixelBufferCGImageCompatibilityKey: NSNumber(bool: true)
    ]
    let status: CVReturn = CVPixelBufferCreate(
        nil,
        Int(size.width),
        Int(size.height),
        OSType(kCVPixelFormatType_32ARGB),
        bufferOptions,
        &pixelBuffer
    )
    let managedPixelBuffer = pixelBuffer!.takeRetainedValue()
    let lockStatus = CVPixelBufferLockBaseAddress(managedPixelBuffer, 0)
    let pixelData = CVPixelBufferGetBaseAddress(managedPixelBuffer)
    let bitmapInfo = CGBitmapInfo(rawValue: CGImageAlphaInfo.PremultipliedFirst.rawValue)
    let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
    //Draw the image into the pixel buffer's memory
    let context = CGBitmapContextCreate(
        pixelData,
        Int(size.width),
        Int(size.height),
        8,
        Int(4 * size.width),
        rgbColorSpace,
        bitmapInfo
    )
    CGContextDrawImage(context, CGRectMake(0, 0, size.width, size.height), image.CGImage)
    CVPixelBufferUnlockBaseAddress(managedPixelBuffer, 0)
    return managedPixelBuffer
}
func saveVideoWriterToDisk() {
    //Append the last image once more so the final frame keeps its full duration
    self.addImageToVideoWriter(self.lastImage!, duration: 0)
    self.currentVideoWriter?.endSessionAtSourceTime(self.currentFrameTime!)
    self.currentVideoWriterAdaptor?.assetWriterInput.markAsFinished()
    //Write video to disk, blocking until the writer finishes
    let semaphore = dispatch_semaphore_create(0)
    self.currentVideoWriter?.finishWritingWithCompletionHandler({
        dispatch_semaphore_signal(semaphore)
    })
    dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER)
}
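For reference, a minimal driver for the class might look like the sketch below. The class name VideoFromImageMaker is an assumption (only setup, addImageToVideoWriter, and saveVideoWriterToDisk appear in the code above), as is the way videoImage gets assigned.

//Hypothetical usage, assuming the methods above live on a class
//called VideoFromImageMaker with the stored properties they reference
let maker = VideoFromImageMaker()
maker.videoImage = UIImage(named: "photo")
maker.setup()
//A single frame held for ten seconds should produce a ten-second video
maker.addImageToVideoWriter(maker.videoImage!, duration: 10)
maker.saveVideoWriterToDisk()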
Then I use AVAssetExportSession with an AVMutableComposition to add music to that video. This works most of the time.
func exportVideo(sourceFilePath: String, destinationFilePath: String) -> Bool {
    let fileManager = NSFileManager()
    var success = false
    //Compile audio and video together
    let composition = AVMutableComposition()
    /*
    //Setup audio track
    let trackAudio: AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(
        AVMediaTypeAudio,
        preferredTrackID: CMPersistentTrackID()
    )
    //Add audio file to audio track
    let audioFileAsset = AVURLAsset(URL: self.audioFileURL, options: nil)
    let audioFileAssetTrack = audioFileAsset.tracksWithMediaType(AVMediaTypeAudio).last as! AVAssetTrack
    trackAudio.insertTimeRange(
        audioFileAssetTrack.timeRange,
        ofTrack: audioFileAssetTrack,
        atTime: kCMTimeZero,
        error: nil
    )
    */
    //Setup video track
    let trackVideo: AVMutableCompositionTrack = composition.addMutableTrackWithMediaType(
        AVMediaTypeVideo,
        preferredTrackID: CMPersistentTrackID()
    )
    //Add video file to video track
    let videoFileAsset = AVURLAsset(URL: NSURL(fileURLWithPath: sourceFilePath), options: nil)
    let videoFileAssetTracks = videoFileAsset.tracksWithMediaType(AVMediaTypeVideo)
    if videoFileAssetTracks.count > 0 {
        let videoFileAssetTrack: AVAssetTrack = videoFileAssetTracks[0] as! AVAssetTrack
        trackVideo.insertTimeRange(
            videoFileAssetTrack.timeRange,
            ofTrack: videoFileAssetTrack,
            atTime: kCMTimeZero,
            error: nil
        )
    }
    //Export compiled video to disk
    if fileManager.fileExistsAtPath(destinationFilePath) {
        fileManager.removeItemAtPath(destinationFilePath, error: nil)
    }
    let exporter = AVAssetExportSession(
        asset: composition,
        presetName: AVAssetExportPresetHighestQuality
    )
    exporter.outputFileType = AVFileTypeMPEG4
    exporter.outputURL = NSURL(fileURLWithPath: destinationFilePath)
    let semaphore = dispatch_semaphore_create(0)
    exporter.exportAsynchronouslyWithCompletionHandler({
        //Note: this reports success without checking exporter.status
        success = true
        dispatch_semaphore_signal(semaphore)
    })
    dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER)
    //Delete file at source path
    fileManager.removeItemAtPath(sourceFilePath, error: nil)
    return success
}
However, when the image is very large (in terms of resolution, i.e. 1600×1600 and larger), the video created with AVAssetExportSession comes out 8 seconds long instead of 10. The discrepancy only appears when large images are used.
This is not a big problem, since it is very unlikely anyone would use this class with images that large. Still, I would like to know what is happening and how to fix it.
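One way to narrow down where the two seconds disappear would be to log the duration of the intermediate file written by AVAssetWriter and compare it with the duration of the exported file. A minimal diagnostic sketch, assuming the same Swift 1.x APIs as the code above and that both file paths are available at the call site:

//Diagnostic sketch: compare the writer's output duration with the exporter's
func logVideoDuration(path: String, label: String) {
    let asset = AVURLAsset(URL: NSURL(fileURLWithPath: path), options: nil)
    println("\(label): \(CMTimeGetSeconds(asset.duration)) seconds")
}
logVideoDuration(self.tempVideoPath!, "AVAssetWriter output")          //expected: 10.0
logVideoDuration(destinationFilePath, "AVAssetExportSession output")   //observed: ~8.0 for large images

If the intermediate file is already short, the presentation-time arithmetic in addImageToVideoWriter is the place to look; if it only shrinks during export, the AVMutableComposition step (for example, the timeRange taken from the source track) is the likelier culprit.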