如何使用淡入淡出过渡合并 2 个视频?

ker*_*lda 4 avfoundation avplayer avmutablecomposition avasset swift

我正在尝试将两个重叠的 2 个视频合并为 2 秒。在这种重叠中,我想淡入第二个视频(或淡出第一个视频以显示第二个视频,任何一个都很棒)。

第一个视频按预期在结束前 2 秒淡出,但当它淡出时,我看到的是黑屏,而不是第二个视频淡入。在视频 1 结束时,视频 2 在动画淡入过程中显示了一半。

我看不到重叠的轨道,我做错了什么?下面是我的代码

func setupVideo() {
        
    let url = URL(fileURLWithPath: Bundle.main.path(forResource: "demoVideoTwo", ofType: "mp4")!)
    let assetOne = AVAsset(url: url)
    
    let urlTwo = URL(fileURLWithPath: Bundle.main.path(forResource: "demoVideoThree", ofType: "mp4")!)
    let assetTwo = AVAsset(url: urlTwo)
            
    let mixComposition = AVMutableComposition()
    var instructions = [AVMutableVideoCompositionLayerInstruction]()
    var mainInstructionList = [AVMutableVideoCompositionInstruction]()
    var lastTime = CMTime.zero
    
    // Create Track One

    guard let videoTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)), let audioTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else {
        return
    }
    
    // Setup AVAsset 1

    let timeRange = CMTimeRangeMake(start: CMTime.zero, duration: assetOne.duration)
    
    do {
        try videoTrack.insertTimeRange(timeRange, of: assetOne.tracks(withMediaType: .video)[0], at: lastTime)
        
        try audioTrack.insertTimeRange(timeRange, of: assetOne.tracks(withMediaType: .audio)[0], at: lastTime)
        
    } catch {
        print(error)
    }
    
    // Setup Layer Instruction 1

    let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)

    let duration = CMTime(seconds: 2, preferredTimescale: 60)
    let transitTime = CMTime(seconds: 2, preferredTimescale: 60)
    let insertTime = CMTimeSubtract(assetOne.duration, transitTime)
    let instRange = CMTimeRangeMake(start: insertTime, duration: duration)
    layerInstruction.setOpacityRamp(fromStartOpacity: 1.0, toEndOpacity: 0.0, timeRange: instRange)
    instructions.append(layerInstruction)

    let mainInstruction = AVMutableVideoCompositionInstruction()
    mainInstruction.timeRange = CMTimeRangeMake(start: lastTime, duration: assetOne.duration)
    mainInstruction.layerInstructions = instructions

    mainInstructionList.append(mainInstruction)

    lastTime = CMTimeAdd(lastTime, assetOne.duration)
    
    
    // Create Track One

    guard let videoTrackTwo = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)), let audioTrackTwo = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else {
        return
    }
    
    // Setup AVAsset 2

    let transitionTime = CMTime(seconds: 2, preferredTimescale: 60)
    let newLastTime = CMTimeSubtract(assetOne.duration, transitionTime)

    let timeRangeTwo = CMTimeRangeMake(start: CMTime.zero, duration: assetTwo.duration)

    do {
        try videoTrackTwo.insertTimeRange(timeRangeTwo, of: assetTwo.tracks(withMediaType: .video)[0], at: newLastTime)

        try audioTrackTwo.insertTimeRange(timeRangeTwo, of: assetTwo.tracks(withMediaType: .audio)[0], at: newLastTime)

    } catch {
       print(error)
    }
    
    
    // Setup Layer Instruction 2

    let layerInstructionTwo = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrackTwo)
    let durationTwo = CMTime(seconds: 4, preferredTimescale: 60)
    let instRangeTwo = CMTimeRangeMake(start: newLastTime, duration: durationTwo)
    layerInstructionTwo.setOpacityRamp(fromStartOpacity: 0.0, toEndOpacity: 1.0, timeRange: instRangeTwo)
    instructions.append(layerInstructionTwo)

    let mainInstructionTwo = AVMutableVideoCompositionInstruction()
    mainInstructionTwo.timeRange = CMTimeRangeMake(start: lastTime, duration: assetTwo.duration)
    mainInstructionTwo.layerInstructions = instructions
    
    mainInstructionList.append(mainInstructionTwo)
    
    // Setup Video Composition

    let mainComposition = AVMutableVideoComposition()
    mainComposition.instructions = mainInstructionList
    mainComposition.frameDuration = CMTimeMake(value: 1, timescale: 60)
    mainComposition.renderSize = videoTrack.naturalSize
            
    let item = AVPlayerItem(asset: mixComposition)
    item.videoComposition = mainComposition
    
    player = AVPlayer(playerItem: item)
    
    let playerLayer: AVPlayerLayer = {
        let layer = AVPlayerLayer(player: player)
        layer.videoGravity = .resizeAspectFill
        return layer
    }()
    
    let playerWidth: CGFloat = UIScreen.main.bounds.size.width
    let videoHeight = UIScreen.main.bounds.size.width * 9 / 16
            
    playerLayer.frame = CGRect(x: 0, y: 0, width: playerWidth, height: videoHeight)
    self.layer.addSublayer(playerLayer)
}
Run Code Online (Sandbox Code Playgroud)

Cag*_*glu 7

不要为每个视频轨道创建 AVMutableVideoCompositionInstruction 并且不要每次都为其分配指令。相反,在创建 AVMutableVideoComposition 之前创建一次 AVMutableVideoCompositionInstruction,然后为其分配指令。

这是按照您想要的方式工作的代码。

func setupVideo() {

    let url = URL(fileURLWithPath: Bundle.main.path(forResource: "demoVideoTwo", ofType: "mp4")!)
    let assetOne = AVAsset(url: url)

    let urlTwo = URL(fileURLWithPath: Bundle.main.path(forResource: "demoVideoTwo", ofType: "mp4")!)
    let assetTwo = AVAsset(url: urlTwo)

    let mixComposition = AVMutableComposition()
    var instructions = [AVMutableVideoCompositionLayerInstruction]()
    var lastTime = CMTime.zero

    // Create Track One

    guard let videoTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)), let audioTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else {
        return
    }

    // Setup AVAsset 1

    let timeRange = CMTimeRangeMake(start: CMTime.zero, duration: assetOne.duration)

    do {
        try videoTrack.insertTimeRange(timeRange, of: assetOne.tracks(withMediaType: .video)[0], at: lastTime)

        try audioTrack.insertTimeRange(timeRange, of: assetOne.tracks(withMediaType: .audio)[0], at: lastTime)

    } catch {
        print(error)
    }

    // Setup Layer Instruction 1

    let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)

    let duration = CMTime(seconds: 2, preferredTimescale: 60)
    let transitTime = CMTime(seconds: 2, preferredTimescale: 60)
    let insertTime = CMTimeSubtract(assetOne.duration, transitTime)
    let instRange = CMTimeRangeMake(start: insertTime, duration: duration)
    layerInstruction.setOpacityRamp(fromStartOpacity: 1.0, toEndOpacity: 0.0, timeRange: instRange)
    instructions.append(layerInstruction)

    lastTime = CMTimeAdd(lastTime, assetOne.duration)


    // Create Track One

    guard let videoTrackTwo = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)), let audioTrackTwo = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else {
        return
    }

    // Setup AVAsset 2

    let transitionTime = CMTime(seconds: 2, preferredTimescale: 60)
    let newLastTime = CMTimeSubtract(assetOne.duration, transitionTime)

    let timeRangeTwo = CMTimeRangeMake(start: CMTime.zero, duration: assetTwo.duration)

    do {
        try videoTrackTwo.insertTimeRange(timeRangeTwo, of: assetTwo.tracks(withMediaType: .video)[0], at: newLastTime)

        try audioTrackTwo.insertTimeRange(timeRangeTwo, of: assetTwo.tracks(withMediaType: .audio)[0], at: newLastTime)

    } catch {
       print(error)
    }

    // Setup Layer Instruction 2

    let layerInstructionTwo = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrackTwo)
    layerInstructionTwo.setOpacity(1.0, at: newLastTime)
    instructions.append(layerInstructionTwo)

    // Setup Video Composition

    let mainInstruction = AVMutableVideoCompositionInstruction()
    mainInstruction.timeRange = CMTimeRangeMake(start: .zero, duration: CMTimeAdd(newLastTime, assetTwo.duration))
    mainInstruction.layerInstructions = instructions

    let mainComposition = AVMutableVideoComposition()
    mainComposition.instructions = [mainInstruction]
    mainComposition.frameDuration = CMTimeMake(value: 1, timescale: 60)
    mainComposition.renderSize = videoTrack.naturalSize

    let item = AVPlayerItem(asset: mixComposition)
    item.videoComposition = mainComposition

    player = AVPlayer(playerItem: item)

    let playerLayer: AVPlayerLayer = {
        let layer = AVPlayerLayer(player: player)
        layer.videoGravity = .resizeAspectFill
        return layer
    }()

    let playerWidth: CGFloat = UIScreen.main.bounds.size.width
    let videoHeight = UIScreen.main.bounds.size.width * 9 / 16

    playerLayer.frame = CGRect(x: 0, y: 0, width: playerWidth, height: videoHeight)
    self.layer.addSublayer(playerLayer)
}
Run Code Online (Sandbox Code Playgroud)