如何在相机上绘制矩形并快速捕获矩形内的位置

Nom*_*tar 5 opencv ios swift2

我正在使用 Swift 开发,我的要求是在相机预览上创建一个矩形区域。

我必须只捕获矩形内的部分,

其余部分应显示为模糊。

我尝试了很多链接,但大多数都是 Obj-C 的。我知道我必须在 AVCapture 层上添加一个 UI 层。有一个链接对我有帮助,但我无法实现我的目标。

我尝试减小故事板中 ImageView 的大小,但在这种情况下,我的相机在这个小图像视图中调整整个图像。

这是示例图像。

在此输入图像描述

这是我现有的相机代码:

/// Owns an `AVCaptureSession` that streams video frames from the back camera
/// and forwards each sample buffer to a `VideoFeedDelegateMicr`.
/// (Swift 2 / pre-iOS 10 AVFoundation API, matching the rest of this file.)
class VideoFeedMicr: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate
  {
// Serial queue on which AVFoundation delivers sample buffers; keeps
// `captureOutput(_:didOutputSampleBuffer:fromConnection:)` off the main thread.
let outputQueue = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL)

/// The back-facing camera, or `nil` when the device has none
/// (e.g. Simulator, or camera access unavailable).
let device: AVCaptureDevice? = {
    let devices = AVCaptureDevice.devicesWithMediaType(AVMediaTypeVideo) as! [AVCaptureDevice]
    // Stop at the first back camera instead of scanning the whole list.
    for device in devices where device.position == .Back {
        return device
    }
    return nil
}()

var input: AVCaptureDeviceInput? = nil
// NOTE(review): this delegate reference is strong. `ViewMicrScanactivity`
// retains the feed and sets itself as delegate, which makes a retain cycle.
// Make this `weak` once `VideoFeedDelegateMicr` is class-constrained — TODO confirm.
var delegate: VideoFeedDelegateMicr? = nil

let session: AVCaptureSession = {
    let session = AVCaptureSession()
    session.sessionPreset = AVCaptureSessionPresetHigh
    return session
}()

let videoDataOutput: AVCaptureVideoDataOutput = {
    let output = AVCaptureVideoDataOutput()
    // BGRA frames are what the downstream OpenCV/filter code expects.
    output.videoSettings = [ kCVPixelBufferPixelFormatTypeKey: NSNumber(unsignedInt: kCMPixelFormat_32BGRA) ]
    output.alwaysDiscardsLateVideoFrames = true
    return output
}()

/// Configures the session (input + output) and starts it running.
/// - Throws: any error from `configure()` — e.g. no camera, no permission,
///   or the session rejecting the input/output.
func start() throws {
    try configure()
    session.startRunning()
}

/// Stops the capture session; frames stop arriving at the delegate.
func stop() {
    session.stopRunning()
}

/// Wires the back-camera input and the video-data output into the session.
/// Throws a descriptive `NSError` instead of crashing or printing when a
/// step fails (the original force-unwrapped `device!` and threw a
/// placeholder migrator error carrying no information).
private func configure() throws {
    guard let device = device else {
        throw NSError(domain: "VideoFeedMicr", code: 1,
                      userInfo: [NSLocalizedDescriptionKey: "No back camera available."])
    }
    let input = try AVCaptureDeviceInput(device: device)
    self.input = input

    guard session.canAddInput(input) else {
        throw NSError(domain: "VideoFeedMicr", code: 2,
                      userInfo: [NSLocalizedDescriptionKey: "Video input error. Maybe unauthorised or no camera."])
    }
    session.addInput(input)

    // Register for sample buffers before adding the output to the session.
    videoDataOutput.setSampleBufferDelegate(self, queue: outputQueue)
    guard session.canAddOutput(videoDataOutput) else {
        throw NSError(domain: "VideoFeedMicr", code: 3,
                      userInfo: [NSLocalizedDescriptionKey: "Video output error."])
    }
    session.addOutput(videoDataOutput)

    // Cheque scanning is done in landscape; fix the connection orientation.
    let connection = videoDataOutput.connectionWithMediaType(AVMediaTypeVideo)
    connection.videoOrientation = AVCaptureVideoOrientation.LandscapeRight
}

/// AVFoundation callback — invoked on `outputQueue` for every frame.
/// Forwards the raw sample buffer to the delegate, if any.
func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
    delegate?.videoFeedMicr(self, didUpdateWithSampleBuffer: sampleBuffer)
 }
}
Run Code Online (Sandbox Code Playgroud)

我在这里称之为

/// Screen that shows the live camera feed and scans frames for a cheque,
/// then crops the MICR strip and hands it to OCR.
/// Receives frames via `VideoFeedDelegateMicr` from a `VideoFeedMicr` it owns.
class ViewMicrScanactivity: UIViewController,VideoFeedDelegateMicr
{

// @IBOutlet weak var button: UIButton!
@IBOutlet weak var button: UIButton!
// @IBOutlet weak var imageView: UIImageView!
// Camera feed; this controller registers itself as its delegate in awakeFromNib.
let feed: VideoFeedMicr = VideoFeedMicr()

// Set once a cheque is detected so later frames are ignored.
// NOTE(review): written on the main queue but read on the capture queue in
// videoFeedMicr(_:didUpdateWithSampleBuffer:) — unsynchronized; confirm this
// race is acceptable.
var chequefound :Bool = false;
var accountnumber = ""
var amountlimit = ""

// Displays the current frame, then the cropped MICR strip once found.
@IBOutlet weak var chequeimage: UIImageView!

override func viewDidLoad()
{

    super.viewDidLoad()
    // Force the device into landscape-right via the private "orientation" KVC
    // key — not a public API; may break on future iOS versions.
    let value = UIInterfaceOrientation.LandscapeRight.rawValue
    UIDevice.currentDevice().setValue(value, forKey: "orientation")

}

override func shouldAutorotate() -> Bool {
    return true;
}
override func awakeFromNib() {
    super.awakeFromNib()
    // Wire ourselves up as the frame consumer before the view appears.
    feed.delegate = self
}

override func viewWillAppear(animated: Bool) {
    super.viewWillAppear(animated)
    startVideoFeed()
}

override func viewDidDisappear(animated: Bool) {
    super.viewDidDisappear(animated)
    // Stop the session when leaving the screen to release the camera.
    feed.stop()
}

/// Starts the capture session; failures (e.g. denied camera permission)
/// are currently swallowed — see the empty catch below.
func startVideoFeed() {
    do {
        try feed.start()
        print("Video started.")
    }
    catch {
        // alert?
        // need to look into device permissions
    }
}

/// VideoFeedDelegateMicr callback — invoked off the main thread for every
/// captured frame. Runs cheque detection, and on the first hit crops a strip
/// from the bottom of the frame (the MICR line) and sends it to OCR.
func videoFeedMicr(videoFeed: VideoFeedMicr, didUpdateWithSampleBuffer sampleBuffer: CMSampleBuffer!)
{

    // FaceObscurationFilterMicr is defined elsewhere in the project;
    // presumably it wraps the frame and exposes it as `inputuiimage`
    // once process() runs — TODO confirm.
    let filter = FaceObscurationFilterMicr(sampleBuffer: sampleBuffer)
    if(!chequefound)
    {

        chequefound = filter.process()
        // UI updates must happen on the main queue.
        dispatch_async(dispatch_get_main_queue(), { () -> Void in
            self.chequeimage.image = filter.inputuiimage!

          if(self.chequefound)
          {


             // Crop a 60pt-tall band starting 90pt above the bottom edge —
             // assumed to be where the MICR line sits on the cheque.
             filter.cropmicr =  filter.cropToBounds(filter.inputuiimage! , X:0.0 , Y:Double(filter.inputuiimage!.size.height) - Double(90.0) ,width:Double(filter.inputuiimage!.size.width) , height:Double(60.0));
            self.chequeimage.image = filter.cropmicr

            //  let image = UIImage(named: filter.cropmicr )
            //let scaledImage = scaleImage(image!, maxDimension: 640)
            // Hand the cropped strip to OCR (defined elsewhere in the project).
            self.performImageRecognitionnew(filter.cropmicr!)
            }
           // self.chequeimage.image = filter.cropmicr!

        })
    }
    else
    {
       print("chequefound = true")
    }

}
 }
Run Code Online (Sandbox Code Playgroud)