应用AVVideoComposition后旋转视频

IHa*_*ion 7 avfoundation ios avvideocomposition cifilter swift

将 AVVideoComposition 应用到我的 AVPlayerItem 之后，我应用的过滤器确实有效，但视频在 AVPlayerLayer 中发生了旋转。

我知道问题不在于过滤后的帧，因为如果我在 UIImageView 中显示这些帧，它们会 100% 正确地呈现。

在我应用 videoComposition 之前，视频一直显示正确。设置 AVPlayerLayer 的 videoGravity 没有帮助。

视频顺时针旋转90º并在图层中拉伸.

基本上，在对 AVPlayerItem 应用 AVMutableVideoComposition 之前，视频通过 AVPlayerLayer 可以完美地显示。一旦应用了它，视频就会旋转 -90º，然后被缩放到与过滤前视频相同的尺寸。这让我觉得它没有意识到变换已经是正确的，因此又把变换重新应用了一次。

为什么会发生这种情况,我该如何解决?

这是一些代码:

// Wraps the player item's asset in an AVMutableComposition, copies the source
// track's preferredTransform onto the composition track, then attaches a
// CIFilter-based AVVideoComposition to the player item.
private func filterVideo(with filter: Filter?) {
    if let player = player, let playerItem = player.currentItem {
        let composition = AVMutableComposition()
        let videoAssetTrack = playerItem.asset.tracks(withMediaType: .video).first
        let videoCompositionTrack = composition.addMutableTrack(withMediaType: .video, preferredTrackID: kCMPersistentTrackID_Invalid)
        // NOTE(review): force-unwrapping videoAssetTrack! crashes on assets with
        // no video track, and `try?` silently swallows insertion failures —
        // prefer a guard-let with explicit error handling.
        try? videoCompositionTrack?.insertTimeRange(CMTimeRange(start: kCMTimeZero, duration: playerItem.asset.duration), of: videoAssetTrack!, at: kCMTimeZero)
        // Copy the capture orientation onto the new track so the composition
        // renders upright instead of in the sensor's native orientation.
        videoCompositionTrack?.preferredTransform = videoAssetTrack!.preferredTransform

        let videoComposition = AVMutableVideoComposition(asset: composition, applyingCIFiltersWithHandler: { (request) in
            let filteredImage = <...>  // placeholder: apply the CIFilter to request.sourceImage
            request.finish(with: filteredImage, context: nil)
        })

        playerItem.videoComposition = videoComposition
    }
}
Run Code Online (Sandbox Code Playgroud)

Dhi*_*iru 7

你的问题出在 AVVideoComposition 的 renderSize 上。你应该在 AVMutableVideoCompositionInstruction 中应用变换（即旋转和平移变换）。

我已经在Objective-c中完成了 我发布我的代码,你可以将语法转换为Swift

Objective-C的

//------------------------------------
//      FIXING ORIENTATION
//------------------------------------


// One instruction spanning the full merged timeline (first + second asset).
AVMutableVideoCompositionInstruction * MainInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeAdd(firstAsset.duration, secondAsset.duration));

// NOTE(review): this "First" layer instruction is built from secondTrack —
// looks swapped with SecondlayerInstruction below (built from firstTrack);
// verify against the composition's actual track order.
AVMutableVideoCompositionLayerInstruction *FirstlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:secondTrack]; // second

AVAssetTrack *FirstAssetTrack = [[firstAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation FirstAssetOrientation_  = UIImageOrientationUp;
BOOL  isFirstAssetPortrait_  = NO;
// Decode the capture orientation by matching the preferredTransform matrix
// against the four canonical rotation matrices (90°, -90°, 0°, 180°).
CGAffineTransform firstTransform = FirstAssetTrack.preferredTransform;
if(firstTransform.a == 0 && firstTransform.b == 1.0 && firstTransform.c == -1.0 && firstTransform.d == 0)  {FirstAssetOrientation_= UIImageOrientationRight; isFirstAssetPortrait_ = YES;}
if(firstTransform.a == 0 && firstTransform.b == -1.0 && firstTransform.c == 1.0 && firstTransform.d == 0)  {FirstAssetOrientation_ =  UIImageOrientationLeft; isFirstAssetPortrait_ = YES;}
if(firstTransform.a == 1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == 1.0)   {FirstAssetOrientation_ =  UIImageOrientationUp;}
if(firstTransform.a == -1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == -1.0) {FirstAssetOrientation_ = UIImageOrientationDown;}
// Scale the clip so it fits a 320-pt-wide render target; portrait clips
// scale by height instead because their preferredTransform swaps the axes.
CGFloat FirstAssetScaleToFitRatio = 320.0/FirstAssetTrack.naturalSize.width;
if(isFirstAssetPortrait_){
    FirstAssetScaleToFitRatio = 320.0/FirstAssetTrack.naturalSize.height;
    CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
    [FirstlayerInstruction setTransform:CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:kCMTimeZero];
}else{
    CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
    // Landscape clips are centered vertically in the 480-pt render target
    // with a 160-pt downward translation.
    [FirstlayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(FirstAssetTrack.preferredTransform, FirstAssetScaleFactor),CGAffineTransformMakeTranslation(0, 160)) atTime:kCMTimeZero];
}
// Hide the first clip once its duration has elapsed so the second shows.
[FirstlayerInstruction setOpacity:0.0 atTime:firstAsset.duration];



// Same orientation-detection and scale-to-fit treatment for the second clip.
// NOTE(review): built from firstTrack while FirstlayerInstruction above uses
// secondTrack — the pairing looks swapped; confirm against the composition.
AVMutableVideoCompositionLayerInstruction *SecondlayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:firstTrack];
AVAssetTrack *SecondAssetTrack = [[secondAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
UIImageOrientation SecondAssetOrientation_  = UIImageOrientationUp;
BOOL  isSecondAssetPortrait_  = NO;
// Match the preferredTransform matrix against the canonical rotations.
CGAffineTransform secondTransform = SecondAssetTrack.preferredTransform;
if(secondTransform.a == 0 && secondTransform.b == 1.0 && secondTransform.c == -1.0 && secondTransform.d == 0)  {SecondAssetOrientation_= UIImageOrientationRight; isSecondAssetPortrait_ = YES;}
if(secondTransform.a == 0 && secondTransform.b == -1.0 && secondTransform.c == 1.0 && secondTransform.d == 0)  {SecondAssetOrientation_ =  UIImageOrientationLeft; isSecondAssetPortrait_ = YES;}
if(secondTransform.a == 1.0 && secondTransform.b == 0 && secondTransform.c == 0 && secondTransform.d == 1.0)   {SecondAssetOrientation_ =  UIImageOrientationUp;}
if(secondTransform.a == -1.0 && secondTransform.b == 0 && secondTransform.c == 0 && secondTransform.d == -1.0) {SecondAssetOrientation_ = UIImageOrientationDown;}
CGFloat SecondAssetScaleToFitRatio = 320.0/SecondAssetTrack.naturalSize.width;
if(isSecondAssetPortrait_){
    SecondAssetScaleToFitRatio = 320.0/SecondAssetTrack.naturalSize.height;
    CGAffineTransform SecondAssetScaleFactor = CGAffineTransformMakeScale(SecondAssetScaleToFitRatio,SecondAssetScaleToFitRatio);
    // The second clip's transform takes effect when the first clip ends.
    [SecondlayerInstruction setTransform:CGAffineTransformConcat(SecondAssetTrack.preferredTransform, SecondAssetScaleFactor) atTime:firstAsset.duration];
}else{
    // NOTE(review): stray empty statement below, and atTime uses
    // secondAsset.duration where the portrait branch uses firstAsset.duration.
    ;
    CGAffineTransform SecondAssetScaleFactor = CGAffineTransformMakeScale(SecondAssetScaleToFitRatio,SecondAssetScaleToFitRatio);
    [SecondlayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(SecondAssetTrack.preferredTransform, SecondAssetScaleFactor),CGAffineTransformMakeTranslation(0, 160)) atTime:secondAsset.duration];
}


// Attach BOTH layer instructions: the original line listed only
// SecondlayerInstruction (with a stray double semicolon), which dropped the
// fully-configured FirstlayerInstruction and left the first clip unrendered.
MainInstruction.layerInstructions = [NSArray arrayWithObjects:SecondlayerInstruction,FirstlayerInstruction,nil];

// 30 fps composition rendered at a fixed 320x480 portrait canvas; the layer
// transforms above scale/translate each track to fit this renderSize.
AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];
MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
MainCompositionInst.frameDuration = CMTimeMake(1, 30);
MainCompositionInst.renderSize = CGSizeMake(320.0, 480.0);


// Now , you have Orientation Fixed Instrucation layer
// add this composition to your video 
// If you want to export Video than you can do like below

// Build an output path inside the app's Documents directory; the random
// suffix avoids clobbering a previous export.
NSString *documentsDirectory = [NSHomeDirectory()
stringByAppendingPathComponent:@"Documents"];
NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"final_merged_video-%d.mp4",arc4random() % 1000]];


NSURL *url = [NSURL fileURLWithPath:myPathDocs];
// 5 - Create exporter
// NOTE(review): the file name says .mp4 but outputFileType below is
// QuickTime (.mov); container-sniffing players cope, strict consumers may not.
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition
presetName:AVAssetExportPreset640x480];
exporter.outputURL=url;
exporter.videoComposition=MainCompositionInst; // the orientation-fixed composition built above
exporter.outputFileType = AVFileTypeQuickTimeMovie;
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^{
// The completion handler fires on a background queue; hop to the main
// queue before touching UI (indicator + delegate callback).
dispatch_async(dispatch_get_main_queue(), ^
{

[[AppDelegate Getdelegate] hideIndicator];

[self exportDidFinish:exporter];
});
}];
Run Code Online (Sandbox Code Playgroud)

对于 Swift 版本，请参见这个答案（点击这里）。

此外,您还可以尝试通过对其应用旋转变换来旋转视频图层.

// Convert degrees to radians for CGAffineTransformMakeRotation.
#define degreeToRadian(x) (M_PI * x / 180.0)

// Rotate the player layer itself as a visual-only workaround (does not
// change the rendered composition). The original snippet contained a
// mis-encoded identifier ("degreeToRad??ian") and a missing semicolon.
[_playerLayer setAffineTransform:CGAffineTransformMakeRotation(degreeToRadian(degree))];
Run Code Online (Sandbox Code Playgroud)


Tik*_*iko 5

如果你想使用 AVMutableComposition，你应该把 AVAssetTrack 的 preferredTransform 设置到 AVMutableCompositionTrack 的 preferredTransform 上。

// Wrap the asset in an AVMutableComposition, copy the source video track's
// preferredTransform onto the composition track, then attach a
// CIFilter-driven video composition to a new player item.
// NOTE(review): the force unwraps (url!, videoTrack!, outputImage!) assume a
// valid URL, a video track, and a successful filter — confirm in the caller.
let asset = AVAsset(url: url!)

let composition = AVMutableComposition()
let compositionTrack = composition.addMutableTrack(withMediaType: AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)

let videoTrack = asset.tracks(withMediaType: AVMediaTypeVideo).first

try? compositionTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, asset.duration), of: videoTrack!, at: kCMTimeZero)

// Key step: without this the composition loses the recording orientation
// and the filtered video renders rotated.
compositionTrack.preferredTransform = (videoTrack?.preferredTransform)!

let playerItem = AVPlayerItem(asset: composition)
let filter = CIFilter(name: "CIColorInvert")
playerItem.videoComposition = AVVideoComposition(asset: composition, applyingCIFiltersWithHandler: { (request: AVAsynchronousCIImageFilteringRequest) in
            filter?.setValue(request.sourceImage, forKey: kCIInputImageKey)
            request.finish(with: (filter?.outputImage)!, context: nil)
        })
 .... the rest of code
Run Code Online (Sandbox Code Playgroud)


IHa*_*ion 2

最后对我有用的是:

/// Attaches a CIFilter-backed AVVideoComposition to the player layer's
/// current item. With no `filter`, frames pass through unchanged; on a
/// filter failure the request is finished with `RenderError.couldNotFilter`.
private func filterVideo(with filter: Filter?) {
    guard let currentItem = playerLayer?.player?.currentItem else { return }

    currentItem.videoComposition = AVVideoComposition(asset: currentItem.asset, applyingCIFiltersWithHandler: { request in
        let sourceExtent = request.sourceImage.extent

        guard let activeFilter = filter else {
            // No filter selected: hand the source frame back unchanged.
            request.finish(with: request.sourceImage.cropping(to: sourceExtent), context: nil)
            return
        }

        guard let rendered = activeFilter.filterImage(request.sourceImage) else {
            printError("Image not filtered")
            request.finish(with: RenderError.couldNotFilter)
            return
        }

        // Cropping back to the source extent keeps the frame geometry intact,
        // which is what prevents the rotation/stretch artifacts.
        request.finish(with: rendered.cropping(to: sourceExtent), context: nil)
    })
}
Run Code Online (Sandbox Code Playgroud)

这是 Filter 的 filterImage 函数，它只是对 CIFilter 的一个很好的小包装：

/// Runs `ciImage` through the wrapped CIFilter and returns the output image,
/// or nil when no filter is configured or the filter produced no output.
func filterImage(_ ciImage: CIImage) -> CIImage? {
    guard let wrapped = ciFilter else { return nil }
    // Reset any leftover parameter state before supplying the input frame.
    wrapped.setDefaults()
    wrapped.setValue(ciImage, forKey: kCIInputImageKey)
    return wrapped.value(forKey: kCIOutputImageKey) as? CIImage
}
Run Code Online (Sandbox Code Playgroud)