Swift 3:如何在自定义AVFoundation相机上启用闪光灯?

Jim*_*mmy 5 avfoundation swift swift3

我有一个非常基本的AVFoundation相机:按下captureButton会拍摄一张照片,并把照片传给secondCameraController来显示.我的问题是iOS 10弃用了很多相关API,所以我不确定按下captureButton时应该如何启用闪光灯.任何帮助都将不胜感激.我的代码如下.感谢你们.

/// A full-screen camera that streams video frames from the back camera,
/// captures a still when `captureButton` is tapped, and presents the
/// result in a `SecondCameraController`.
///
/// Written against the Swift 3 / iOS 10 AVFoundation APIs used by the
/// rest of this file.
class CameraController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate {

    /// Coordinates the camera input and the video-data output.
    let captureSession = AVCaptureSession()

    /// Live camera preview layer; created in `beginSession()`.
    var previewLayer: CALayer!

    /// The back wide-angle camera; resolved in `prepareCamera()`.
    var captureDevice: AVCaptureDevice!

    /// Set on the main thread by `handleCapture()` and read on the
    /// sample-buffer queue, where the next delivered frame becomes the photo.
    var takePhoto: Bool = false

    override func viewDidLoad() {
        super.viewDidLoad()
        view.backgroundColor = .white
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        prepareCamera()
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        navigationController?.setNavigationBarHidden(true, animated: true)
    }

    // NOTE(review): never added to the view hierarchy anywhere in this file —
    // kept for interface compatibility, but it looks like dead code.
    let cameraView: UIView = {
        let view = UIView()
        view.backgroundColor = .red
        return view
    }()

    /// Selects the back wide-angle camera and kicks off session setup.
    func prepareCamera() {
        captureSession.sessionPreset = AVCaptureSessionPresetPhoto

        if let availableDevices = AVCaptureDeviceDiscoverySession(deviceTypes: [.builtInWideAngleCamera], mediaType: AVMediaTypeVideo, position: .back).devices {
            captureDevice = availableDevices.first
            beginSession()
        }
    }

    /// Wires the capture pipeline (input → output), installs the preview layer
    /// and shutter button, then starts the session.
    ///
    /// Fixes over the original: session mutations are wrapped in a balanced
    /// `beginConfiguration()` / `commitConfiguration()` pair (the original
    /// committed a configuration it never began), `addInput` is guarded by
    /// `canAddInput`, and `startRunning()` runs only after the configuration
    /// is committed and the delegate is installed.
    func beginSession() {
        captureSession.beginConfiguration()

        do {
            let captureDeviceInput = try AVCaptureDeviceInput(device: captureDevice)
            if captureSession.canAddInput(captureDeviceInput) {
                captureSession.addInput(captureDeviceInput)
            }
        } catch {
            print(error.localizedDescription)
        }

        let dataOutput = AVCaptureVideoDataOutput()
        // BGRA frames are what getImageFromSampleBuffer(buffer:) expects.
        dataOutput.videoSettings = [(kCVPixelBufferPixelFormatTypeKey as NSString): NSNumber(value: kCVPixelFormatType_32BGRA)]
        dataOutput.alwaysDiscardsLateVideoFrames = true

        if captureSession.canAddOutput(dataOutput) {
            captureSession.addOutput(dataOutput)
        }

        captureSession.commitConfiguration()

        if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) {
            self.previewLayer = previewLayer
            previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
            previewLayer.frame = CGRect(x: 0, y: 0, width: view.frame.width, height: view.frame.height)
            view.layer.addSublayer(previewLayer)

            view.addSubview(captureButton)
            let width: CGFloat = 85
            captureButton.frame = CGRect(x: (previewLayer.frame.width / 2) - width / 2, y: (previewLayer.frame.height) - width - 25, width: width, height: width)
        }

        // Frames are delivered on a private serial queue, not the main thread.
        let queue = DispatchQueue(label: "com.cheekylabsltd.camera")
        dataOutput.setSampleBufferDelegate(self, queue: queue)

        captureSession.startRunning()
    }

    /// Flags the next delivered video frame to be captured as a still photo.
    func handleCapture() {
        takePhoto = true
    }

    /// Called on the sample-buffer queue for every frame; when `takePhoto`
    /// is set, converts the frame to a UIImage and presents it.
    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) {
        guard takePhoto else { return }
        takePhoto = false

        if let image = getImageFromSampleBuffer(buffer: sampleBuffer) {
            let secondController = SecondCameraController()
            secondController.takenPhoto = image

            // UI work must happen on the main thread.
            DispatchQueue.main.async {
                self.present(secondController, animated: true, completion: {
                    self.stopCaptureSession()
                })
            }
        }
    }

    /// Converts a BGRA sample buffer into a portrait-oriented UIImage,
    /// or returns nil when the buffer carries no image data.
    func getImageFromSampleBuffer(buffer: CMSampleBuffer) -> UIImage? {
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(buffer) else { return nil }

        let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
        let context = CIContext()
        let imageRect = CGRect(x: 0, y: 0, width: CVPixelBufferGetWidth(pixelBuffer), height: CVPixelBufferGetHeight(pixelBuffer))

        if let image = context.createCGImage(ciImage, from: imageRect) {
            // .right compensates for the sensor's native landscape orientation.
            return UIImage(cgImage: image, scale: UIScreen.main.scale, orientation: .right)
        }

        return nil
    }

    /// Stops the session and detaches its inputs so the camera is released.
    func stopCaptureSession() {
        captureSession.stopRunning()

        if let inputs = captureSession.inputs as? [AVCaptureDeviceInput] {
            for input in inputs {
                captureSession.removeInput(input)
            }
        }
    }

    /// Round, semi-transparent shutter button wired to `handleCapture()`.
    lazy var captureButton: UIButton = {
        let button = UIButton(type: .system)
        button.backgroundColor = .white
        button.layer.cornerRadius = 42.5
        button.clipsToBounds = true
        button.alpha = 0.40
        button.layer.borderWidth = 4
        button.layer.borderColor = greenColor.cgColor
        button.addTarget(self, action: #selector(handleCapture), for: .touchUpInside)
        return button
    }()
}
Run Code Online (Sandbox Code Playgroud)

Jay*_*hta 9

试试这段代码:Swift v3.0

/// Turns the torch (and still-photo flash) on for the given device.
///
/// Fixes over the original: the device is unlocked via `defer` on every
/// exit path, the caught error is actually printed, and the log-message
/// typo ("tourch") is corrected.
/// - Parameter device: The camera whose torch should be enabled.
private func flashOn(device: AVCaptureDevice) {
    // Devices without a torch (front camera, Simulator) are skipped silently.
    guard device.hasTorch else { return }
    do {
        try device.lockForConfiguration()
        defer { device.unlockForConfiguration() }
        device.torchMode = .on
        device.flashMode = .on
    } catch {
        // Disable the flash button here if configuration fails.
        print("Device torch/flash error: \(error)")
    }
}
Run Code Online (Sandbox Code Playgroud)

// 用于关闭闪光灯的代码

 /// Turns the torch (and still-photo flash) off for the given device.
 ///
 /// Fixes over the original: the device is unlocked via `defer` on every
 /// exit path, the caught error is actually printed, and the log-message
 /// typo ("tourch") is corrected.
 /// - Parameter device: The camera whose torch should be disabled.
 private func flashOff(device: AVCaptureDevice) {
     // Devices without a torch (front camera, Simulator) are skipped silently.
     guard device.hasTorch else { return }
     do {
         try device.lockForConfiguration()
         defer { device.unlockForConfiguration() }
         device.torchMode = .off
         device.flashMode = .off
     } catch {
         // Disable the flash button here if configuration fails.
         print("Device torch/flash error: \(error)")
     }
 }
Run Code Online (Sandbox Code Playgroud)

// 方法

// private let session = AVCaptureSession()

//MARK: FLASH UITLITY METHODS
    //MARK: FLASH UITLITY METHODS

    /// Toggles the torch on/off for the default video capture device.
    ///
    /// Fixes over the original: the force-unwraps of the discovery session,
    /// its `devices` array, and `first` (which crash on devices with no
    /// camera, e.g. the Simulator) are replaced with optional binding and
    /// early exits; the pointless `(device as AnyObject)` cast is removed.
    func toggleFlash() {
        var device: AVCaptureDevice?

        if #available(iOS 10.0, *) {
            // Discovery-session API replaces the deprecated enumeration on iOS 10+.
            let videoDeviceDiscoverySession = AVCaptureDeviceDiscoverySession(deviceTypes: [.builtInWideAngleCamera, .builtInDuoCamera], mediaType: AVMediaTypeVideo, position: .unspecified)
            device = videoDeviceDiscoverySession?.devices?.first
        } else {
            // Fallback on earlier versions
            device = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo)
        }

        // Bail out when there is no torch-capable video camera.
        guard let camera = device,
            camera.hasMediaType(AVMediaTypeVideo),
            camera.hasTorch else { return }

        self.session.beginConfiguration()
        //self.objOverlayView.disableCenterCameraBtn();
        if camera.isTorchActive == false {
            self.flashOn(device: camera)
        } else {
            self.flashOff(device: camera)
        }
        //self.objOverlayView.enableCenterCameraBtn();
        self.session.commitConfiguration()
    }
Run Code Online (Sandbox Code Playgroud)