Tags: video · core-animation · avfoundation · ios · avmutablecomposition
I am writing code to generate a slideshow video from multiple images and multiple videos on an iOS device. I can do it with one video and multiple images, but I can't figure out how to extend it to multiple videos.

Here is a sample video I was able to generate with one video and two images.

This is the main routine, which prepares the exporter.
// Prepare the temporary location to store generated video
NSURL * urlAsset = [NSURL fileURLWithPath:[StoryMaker tempFilePath:@"mov"]];
// Prepare composition and _exporter
AVMutableComposition *composition = [AVMutableComposition composition];
AVAssetExportSession* exporter = [[AVAssetExportSession alloc] initWithAsset:composition presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL = urlAsset;
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.shouldOptimizeForNetworkUse = YES;
exporter.videoComposition = [self _addVideo:composition time:timeVideo];
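The snippet above only configures the exporter; the export itself is then started asynchronously (the original post does not show that step). A minimal sketch of how it might be kicked off, written in Swift to match the answer below:

// Hypothetical continuation: start the export and inspect the result.
exporter.exportAsynchronously {
    switch exporter.status {
    case .completed:
        print("Export finished: \(exporter.outputURL!)")
    case .failed, .cancelled:
        print("Export failed: \(String(describing: exporter.error))")
    default:
        break
    }
}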
Here is the _addVideo:time: method, which creates the videoLayer.
-(AVVideoComposition*) _addVideo:(AVMutableComposition*)composition time:(CMTime)timeVideo {
    AVMutableVideoComposition* videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.renderSize = _sizeVideo;
    videoComposition.frameDuration = CMTimeMake(1, 30); // 30fps

    AVMutableCompositionTrack *compositionVideoTrack = [composition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, timeVideo) ofTrack:_baseVideoTrack atTime:kCMTimeZero error:nil];

    // Prepare the parent layer
    CALayer *parentLayer = [CALayer layer];
    parentLayer.backgroundColor = [UIColor blackColor].CGColor;
    parentLayer.frame = CGRectMake(0, 0, _sizeVideo.width, _sizeVideo.height);

    // Prepare the images' parent layer
    CALayer *imageParentLayer = [CALayer layer];
    imageParentLayer.frame = CGRectMake(0, 0, _sizeVideo.width, _sizeVideo.height);
    [parentLayer addSublayer:imageParentLayer];

    // Specify the perspective view (m34 adds depth to sublayer transforms)
    CATransform3D perspective = CATransform3DIdentity;
    perspective.m34 = -1.0 / imageParentLayer.frame.size.height;
    imageParentLayer.sublayerTransform = perspective;

    // Animations. Core Animation treats a beginTime of 0 as "now",
    // so a tiny epsilon stands in for the start of the video.
    _beginTime = 1E-10;
    _endTime = CMTimeGetSeconds(timeVideo);
    CALayer* videoLayer = [self _addVideoLayer:imageParentLayer];
    [self _addAnimations:imageParentLayer time:timeVideo];
    videoComposition.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer];

    // Prepare the instruction
    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    {
        instruction.timeRange = CMTimeRangeMake(kCMTimeZero, timeVideo);
        AVAssetTrack *videoTrack = [[composition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
        AVMutableVideoCompositionLayerInstruction* layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
        [layerInstruction setTransform:_baseVideoTrack.preferredTransform atTime:kCMTimeZero];
        instruction.layerInstructions = @[layerInstruction];
    }
    videoComposition.instructions = @[instruction];
    return videoComposition;
}
The _addAnimations:time: method adds the image layers and schedules the animations for all the layers, including _videoLayer.
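For context, layer animations rendered through AVVideoCompositionCoreAnimationTool run on the video's timeline rather than on wall-clock time. A minimal sketch of the kind of animation such a method would schedule (in Swift, to match the answer below; the fade and its parameters are illustrative, not the original code):

import AVFoundation
import QuartzCore

func addFadeAnimation(to layer: CALayer, beginTime: CFTimeInterval, duration: CFTimeInterval) {
    let fade = CABasicAnimation(keyPath: "opacity")
    fade.fromValue = 0.0
    fade.toValue = 1.0
    // Core Animation treats a beginTime of exactly 0 as "now"; use
    // AVCoreAnimationBeginTimeAtZero (or a tiny epsilon such as 1E-10,
    // as the question's code does) to start at the beginning of the video.
    fade.beginTime = beginTime == 0 ? AVCoreAnimationBeginTimeAtZero : beginTime
    fade.duration = duration
    fade.isRemovedOnCompletion = false // keep the final state in the rendered frames
    layer.add(fade, forKey: "fadeIn")
}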
Everything works fine so far.

However, I can't figure out how to add a second video to this slideshow.

The example in the AVFoundation Programming Guide combines two videos using multiple video composition instructions (AVMutableVideoCompositionInstruction), but it still renders them into the single CALayer specified in AVVideoCompositionCoreAnimationTool's videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:inLayer: method.

I want to render the two video tracks into two separate layers (layer1 and layer2) and animate each of them independently, just as I do with the layers associated with the images.
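For reference, AVVideoCompositionCoreAnimationTool does offer a variant that accepts several video layers, but, as the answer below points out, every layer in the list receives the same composited frames. A sketch of the call (layer1, layer2 and parentLayer as above):

// Both layers receive the SAME composited video, so this alone
// does not give two independently animatable videos.
let tool = AVVideoCompositionCoreAnimationTool(
    postProcessingAsVideoLayers: [layer1, layer2],
    in: parentLayer)
videoComposition.animationTool = tool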
I had this problem as well, wanting to animate multiple videos. I found that AVVideoCompositionCoreAnimationTool can accept multiple video layers, but the problem is that they are all instances of the same video. So my workaround was to compose one large video with the 2 videos placed side by side, and to use masking so that each video shows up only in its own layer.

Here is my code; you only need 2 sample videos to make it work:
import UIKit
import AVFoundation
import Photos

class ViewController: UIViewController {

    var myurl: URL?

    override func viewDidLoad() {
        super.viewDidLoad()
    }

    @IBAction func newMerge(_ sender: Any) {
        print("making vid")
        let path = Bundle.main.path(forResource: "sample_video", ofType: "mp4")
        let fileURL = URL(fileURLWithPath: path!)
        let vid = AVURLAsset(url: fileURL)

        let path2 = Bundle.main.path(forResource: "example2", ofType: "mp4")
        let fileURL2 = URL(fileURLWithPath: path2!)
        let vid2 = AVURLAsset(url: fileURL2)

        newoverlay(video: vid, withSecondVideo: vid2)
    }
    func newoverlay(video firstAsset: AVURLAsset, withSecondVideo secondAsset: AVURLAsset) {
        // 1 - Create AVMutableComposition object. This object will hold your AVMutableCompositionTrack instances.
        let mixComposition = AVMutableComposition()

        // 2 - Create two video tracks
        guard let firstTrack = mixComposition.addMutableTrack(withMediaType: .video,
                                                              preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else { return }
        do {
            try firstTrack.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: firstAsset.duration),
                                           of: firstAsset.tracks(withMediaType: .video)[0],
                                           at: CMTime.zero)
        } catch {
            print("Failed to load first track")
            return
        }

        guard let secondTrack = mixComposition.addMutableTrack(withMediaType: .video,
                                                               preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else { return }
        do {
            try secondTrack.insertTimeRange(CMTimeRangeMake(start: CMTime.zero, duration: secondAsset.duration),
                                            of: secondAsset.tracks(withMediaType: .video)[0],
                                            at: CMTime.zero)
        } catch {
            print("Failed to load second track")
            return
        }
        // The render canvas is wide enough to hold both videos side by side
        let width: CGFloat = firstTrack.naturalSize.width + secondTrack.naturalSize.width
        let height: CGFloat = CGFloat.maximum(firstTrack.naturalSize.height, secondTrack.naturalSize.height)

        // Background layer
        let bglayer = CALayer()
        bglayer.frame = CGRect(x: 0, y: 0, width: width, height: height)
        bglayer.backgroundColor = UIColor.blue.cgColor

        // box1 masks the left half of the combined video and can be animated independently
        let box1 = CALayer()
        box1.frame = CGRect(x: 0, y: 0, width: firstTrack.naturalSize.width, height: firstTrack.naturalSize.height - 1)
        box1.backgroundColor = UIColor.red.cgColor
        box1.masksToBounds = true

        let timeInterval: CFTimeInterval = 1
        let scaleAnimation = CABasicAnimation(keyPath: "transform.scale")
        scaleAnimation.fromValue = 1.0
        scaleAnimation.toValue = 1.1
        scaleAnimation.autoreverses = true
        scaleAnimation.isRemovedOnCompletion = false
        scaleAnimation.duration = timeInterval
        scaleAnimation.repeatCount = Float.infinity
        scaleAnimation.beginTime = AVCoreAnimationBeginTimeAtZero
        box1.add(scaleAnimation, forKey: nil)

        // box2 masks the right half of the combined video
        let box2 = CALayer()
        box2.frame = CGRect(x: firstTrack.naturalSize.width + 100, y: 0, width: secondTrack.naturalSize.width, height: secondTrack.naturalSize.height)
        box2.backgroundColor = UIColor.green.cgColor
        box2.masksToBounds = true

        // Each video layer holds the full side-by-side composite; it is offset
        // inside its masking box so that only the desired half is visible.
        let videolayer = CALayer()
        videolayer.frame = CGRect(x: 0, y: -(height - firstTrack.naturalSize.height), width: width + 2, height: height + 2)
        videolayer.backgroundColor = UIColor.clear.cgColor

        let videolayer2 = CALayer()
        videolayer2.frame = CGRect(x: -firstTrack.naturalSize.width, y: 0, width: width, height: height)
        videolayer2.backgroundColor = UIColor.clear.cgColor

        let parentlayer = CALayer()
        parentlayer.frame = CGRect(x: 0, y: 0, width: width, height: height)
        parentlayer.addSublayer(bglayer)
        parentlayer.addSublayer(box1)
        parentlayer.addSublayer(box2)
        box1.addSublayer(videolayer)
        box2.addSublayer(videolayer2)
        let layercomposition = AVMutableVideoComposition()
        layercomposition.frameDuration = CMTimeMake(value: 1, timescale: 30)
        layercomposition.renderSize = CGSize(width: width, height: height)
        layercomposition.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayers: [videolayer, videolayer2], in: parentlayer)

        // 2.1 - Both tracks start at time zero and play in parallel, so the
        // instruction covers the longer of the two durations.
        let mainInstruction = AVMutableVideoCompositionInstruction()
        mainInstruction.timeRange = CMTimeRangeMake(start: CMTime.zero, duration: CMTimeMaximum(firstAsset.duration, secondAsset.duration))
        mainInstruction.backgroundColor = UIColor.clear.cgColor

        // 2.2 - This is where the 2 videos get combined into one large one:
        // the second track is translated to sit to the right of the first.
        let firstInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: firstTrack)
        let move = CGAffineTransform(translationX: 0, y: 0)
        firstInstruction.setTransform(move, at: CMTime.zero)

        let secondInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: secondTrack)
        let move2 = CGAffineTransform(translationX: firstTrack.naturalSize.width, y: 0)
        secondInstruction.setTransform(move2, at: CMTime.zero)

        // 2.3
        mainInstruction.layerInstructions = [firstInstruction, secondInstruction]
        layercomposition.instructions = [mainInstruction]
        // Create a destination file to receive the exported movie
        let dirPaths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)
        let docsDir = dirPaths[0] as NSString
        let movieFilePath = docsDir.appendingPathComponent("result.mov")
        let movieDestinationUrl = URL(fileURLWithPath: movieFilePath)

        // Use AVAssetExportSession to export the composition
        let assetExport = AVAssetExportSession(asset: mixComposition, presetName: AVAssetExportPresetHighestQuality)
        assetExport?.outputFileType = AVFileType.mov
        assetExport?.videoComposition = layercomposition

        // Remove any old file at the destination
        FileManager.default.removeItemIfExisted(movieDestinationUrl)
        assetExport?.outputURL = movieDestinationUrl

        assetExport?.exportAsynchronously(completionHandler: {
            switch assetExport!.status {
            case AVAssetExportSession.Status.failed:
                print("failed")
                print(assetExport?.error ?? "unknown error")
            case AVAssetExportSession.Status.cancelled:
                print("cancelled")
                print(assetExport?.error ?? "unknown error")
            default:
                print("Movie complete")
                self.myurl = movieDestinationUrl

                PHPhotoLibrary.shared().performChanges({
                    PHAssetChangeRequest.creationRequestForAssetFromVideo(atFileURL: movieDestinationUrl)
                }) { saved, error in
                    if saved {
                        print("Saved")
                    }
                }

                // The completion handler runs on a background queue; UI work
                // such as attaching a player layer belongs on the main thread.
                DispatchQueue.main.async {
                    self.playVideo()
                }
            }
        })
    }
    func playVideo() {
        let player = AVPlayer(url: myurl!)
        let playerLayer = AVPlayerLayer(player: player)
        playerLayer.frame = self.view.bounds
        self.view.layer.addSublayer(playerLayer)
        player.play()
        print("playing...")
    }
}
extension FileManager {
    func removeItemIfExisted(_ url: URL) {
        if FileManager.default.fileExists(atPath: url.path) {
            do {
                try FileManager.default.removeItem(atPath: url.path)
            } catch {
                print("Failed to delete file")
            }
        }
    }
}
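One caveat worth noting: the composition above only inserts video tracks, so the exported movie is silent. A minimal sketch, assuming both source assets actually contain audio, of how the audio could be carried across (this helper is an illustrative addition, not part of the original answer):

// Hypothetical helper: copy each asset's first audio track into the mix.
func addAudio(from assets: [AVURLAsset], to mixComposition: AVMutableComposition) {
    for asset in assets {
        // Skip assets without an audio track.
        guard let sourceAudio = asset.tracks(withMediaType: .audio).first,
              let audioTrack = mixComposition.addMutableTrack(withMediaType: .audio,
                                                              preferredTrackID: kCMPersistentTrackID_Invalid) else { continue }
        do {
            try audioTrack.insertTimeRange(CMTimeRangeMake(start: .zero, duration: asset.duration),
                                           of: sourceAudio,
                                           at: .zero)
        } catch {
            print("Failed to insert audio track: \(error)")
        }
    }
}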