How to fix the captured image being mirrored by the front camera, AVFoundation, Swift

Ran*_*eek 4 camera view avfoundation ios swift

How do I fix the captured image being mirrored when using the front camera? SnapChat seems to fix it, and so do WhatsApp and Instagram, so what can I do? I would really like to find a solution for this... it's annoying... thanks in advance.

I have already seen "Always getting a mirrored image when capturing from the front camera, iOS 5.0", but that flips the images from both the back and front cameras, which doesn't really solve anyone's problem. If someone could help me figure out how to flip only the front-camera image, or suggest any other great solution, that would be awesome!

import UIKit
import AVFoundation

@available(iOS 10.0, *)
class CameraViewController: UIViewController,AVCaptureVideoDataOutputSampleBufferDelegate {

    let photoSettings = AVCapturePhotoSettings()
    var audioPlayer = AVAudioPlayer()
    var captureSession = AVCaptureSession()
    var videoDeviceInput: AVCaptureDeviceInput!
    var previewLayer = AVCaptureVideoPreviewLayer()
    var frontCamera: Bool = false
    var captureDevice: AVCaptureDevice!
    var takePhoto = false

    // Outlets referenced below; assumed to be connected in the storyboard.
    @IBOutlet weak var containerView: UIView!
    @IBOutlet weak var DismissButton: UIButton!

    override func viewDidLoad() {
        super.viewDidLoad()
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        prepareCamera()
    }

    func prepareCamera() {
        captureSession.sessionPreset = AVCaptureSessionPresetPhoto

        if let availableDevices = AVCaptureDeviceDiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: AVMediaTypeVideo, position: .back).devices {
            captureDevice = availableDevices.first
            beginSession()
        } 
    }

    func frontCamera(_ front: Bool){
        let devices = AVCaptureDevice.devices()

        // Remove the currently attached camera input (if any) before adding the new one.
        if let currentInput = captureSession.inputs.first as? AVCaptureDeviceInput {
            captureSession.removeInput(currentInput)
        }

        for device in devices!{
            if((device as AnyObject).hasMediaType(AVMediaTypeVideo)){
                if front{
                    if (device as AnyObject).position == AVCaptureDevicePosition.front {
                        captureDevice = device as? AVCaptureDevice

                        do{
                            try captureSession.addInput(AVCaptureDeviceInput(device: captureDevice!))
                        }catch{}
                        break
                    }
                }else{
                    if (device as AnyObject).position == AVCaptureDevicePosition.back {
                        captureDevice = device as? AVCaptureDevice

                        do{
                            try captureSession.addInput(AVCaptureDeviceInput(device: captureDevice!))
                        }catch{}
                        break
                    }
                }
            }
        }
    }

    func beginSession () {
        do {
            // Add the selected camera as the session's input.
            let captureDeviceInput = try AVCaptureDeviceInput(device: captureDevice)
            if captureSession.canAddInput(captureDeviceInput) {
                captureSession.addInput(captureDeviceInput)
            }

            if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) {
                self.previewLayer = previewLayer
                containerView.layer.addSublayer(previewLayer)
                self.previewLayer.frame = self.view.layer.frame
                self.previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
                previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.portrait
                captureSession.startRunning()

                let dataOutput = AVCaptureVideoDataOutput()
                dataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString): NSNumber(value: kCVPixelFormatType_32BGRA)]
                dataOutput.alwaysDiscardsLateVideoFrames = true

                if captureSession.canAddOutput(dataOutput) {
                    captureSession.addOutput(dataOutput)

                    photoSettings.isHighResolutionPhotoEnabled = true
                    photoSettings.isAutoStillImageStabilizationEnabled = true
                }

                captureSession.commitConfiguration()

                let queue = DispatchQueue(label: "com.NightOut.captureQueue")
                dataOutput.setSampleBufferDelegate(self, queue: queue)
            }
        } catch {
            print(error)
        }
    }

    @IBAction func takePhoto(_ sender: Any) {
        takePhoto = true

        photoSettings.isHighResolutionPhotoEnabled = true
        photoSettings.isAutoStillImageStabilizationEnabled = true
    }

    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
        if takePhoto {
            takePhoto = false
            if let image = self.getImageFromSampleBuffer(buffer: sampleBuffer) {
                let photoVC = UIStoryboard(name: "Main", bundle: nil).instantiateViewController(withIdentifier: "PhotoVC") as! PhotoPreviewViewController

                photoVC.takenPhoto = image

                DispatchQueue.main.async {
                    self.present(photoVC, animated: true, completion: {
                        self.stopCaptureSession()
                    })
                }
            }  
        }
    }

    func getImageFromSampleBuffer (buffer:CMSampleBuffer) -> UIImage? {
        if let pixelBuffer = CMSampleBufferGetImageBuffer(buffer) {
            let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
            let context = CIContext()

            let imageRect = CGRect(x: 0, y: 0, width: CVPixelBufferGetWidth(pixelBuffer), height: CVPixelBufferGetHeight(pixelBuffer))

            if let image = context.createCGImage(ciImage, from: imageRect) {
                return UIImage(cgImage: image, scale: UIScreen.main.scale, orientation: .leftMirrored)
            }
        }
        return nil
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)

        self.captureSession.stopRunning()
    }

    func stopCaptureSession () {
        self.captureSession.stopRunning()

        if let inputs = captureSession.inputs as? [AVCaptureDeviceInput] {
            for input in inputs {
                self.captureSession.removeInput(input)
            }
        }
    }

    override func didReceiveMemoryWarning() {
        super.didReceiveMemoryWarning()
    }

    @IBAction func DismissButtonAction(_ sender: UIButton) {

        UIView.animate(withDuration: 0.1, animations: {
            self.DismissButton.transform = CGAffineTransform.identity.scaledBy(x: 0.8, y: 0.8)
        }, completion: { (finish) in
            UIView.animate(withDuration: 0.1, animations: {
                self.DismissButton.transform = CGAffineTransform.identity
            })
        })
        performSegue(withIdentifier: "Segue", sender: nil)
    }
}

Ran*_*eek 5

I figured it out myself; here is the solution:

if captureDevice.position == AVCaptureDevicePosition.back {
    if let image = context.createCGImage(ciImage, from: imageRect) {
        return UIImage(cgImage: image, scale: UIScreen.main.scale, orientation: .right)
    }
}
                
if captureDevice.position == AVCaptureDevicePosition.front {
    if let image = context.createCGImage(ciImage, from: imageRect) {
        return UIImage(cgImage: image, scale: UIScreen.main.scale, orientation: .leftMirrored)
    }
}
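
For context, here is how that branch can slot into the question's getImageFromSampleBuffer(buffer:) — a minimal sketch, assuming captureDevice still tracks the currently active camera as in the code above:

func getImageFromSampleBuffer (buffer: CMSampleBuffer) -> UIImage? {
    if let pixelBuffer = CMSampleBufferGetImageBuffer(buffer) {
        let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
        let context = CIContext()
        let imageRect = CGRect(x: 0, y: 0, width: CVPixelBufferGetWidth(pixelBuffer), height: CVPixelBufferGetHeight(pixelBuffer))

        if let image = context.createCGImage(ciImage, from: imageRect) {
            // Back camera: only rotate. Front camera: rotate and mirror so the
            // saved photo matches what the user saw in the preview.
            if captureDevice.position == AVCaptureDevicePosition.front {
                return UIImage(cgImage: image, scale: UIScreen.main.scale, orientation: .leftMirrored)
            } else {
                return UIImage(cgImage: image, scale: UIScreen.main.scale, orientation: .right)
            }
        }
    }
    return nil
}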

  • The correct solution for this is to set the isVideoMirrored property of the AVCaptureConnection to true for the front camera in the captureOutput callback, before CMSampleBufferGetImageBuffer is called; see the sketch below. (8 upvotes)
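
A minimal sketch of what that comment describes, reusing the question's captureOutput callback and its captureDevice / takePhoto variables; the isVideoMirroringSupported and automaticallyAdjustsVideoMirroring lines are my additions (setting isVideoMirrored while automatic adjustment is enabled can raise an exception):

func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
    // Mirror the connection itself for the front camera, so the frames the
    // output delivers already match the mirrored preview.
    if connection.isVideoMirroringSupported {
        connection.automaticallyAdjustsVideoMirroring = false
        connection.isVideoMirrored = captureDevice.position == .front
    }

    if takePhoto {
        takePhoto = false
        if let image = self.getImageFromSampleBuffer(buffer: sampleBuffer) {
            // ... present the photo preview controller as in the original code
        }
    }
}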

Sta*_*ith 5

I suggest using the native isVideoMirrored property of AVCaptureConnection instead of applying a transform.

Here are the steps, with examples:

  1. Store the current camera position in your custom camera class:

private var currentCameraPosition: AVCaptureDevice.Position = .back

  2. Update currentCameraPosition every time the user switches cameras (a consolidated sketch follows at the end of this answer).

  3. Finally, set isVideoMirrored accordingly in your capturePhoto() method:

     guard let connection = stillImageOutput.connection(with: .video) else { return }
     connection.isVideoMirrored = currentCameraPosition == .front
    

This mirrors the photo when the front camera is in use.

Use the same approach to mirror front-camera video:

    guard let connection = videoCaptureOutput.connection(with: .video) else { return }
    connection.isVideoMirrored = currentCameraPosition == .front
   

That's it!
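
A consolidated sketch of the three steps above, under stated assumptions: switchCamera(), CameraController, stillImageOutput, and videoCaptureOutput are hypothetical names standing in for whatever your camera class actually uses, and mirroring is applied once per camera switch rather than inside capturePhoto():

import AVFoundation

final class CameraController {
    private let captureSession = AVCaptureSession()
    private let stillImageOutput = AVCapturePhotoOutput()
    private let videoCaptureOutput = AVCaptureVideoDataOutput()

    // Step 1: remember which camera is currently active.
    private var currentCameraPosition: AVCaptureDevice.Position = .back

    // Step 2: keep it up to date whenever the user flips the camera.
    func switchCamera() {
        currentCameraPosition = currentCameraPosition == .back ? .front : .back
        // ... swap the AVCaptureDeviceInput on captureSession here ...
        applyMirroring()
    }

    // Step 3: mirror the video connections of both outputs for the front camera.
    private func applyMirroring() {
        let mirrored = currentCameraPosition == .front
        for output in [stillImageOutput, videoCaptureOutput] as [AVCaptureOutput] {
            guard let connection = output.connection(with: .video),
                  connection.isVideoMirroringSupported else { continue }
            connection.automaticallyAdjustsVideoMirroring = false
            connection.isVideoMirrored = mirrored
        }
    }
}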