I configured the back camera for 120 fps. However, when I check the output by printing the time at which captureOutput() is called (see below), the interval between calls is about 33 ms (30 fps); at 120 fps the callbacks should arrive roughly every 8.3 ms (1/120 s). No matter what fps I set via activeVideoMinFrameDuration and activeVideoMaxFrameDuration, the rate observed in captureOutput() is always 30 fps.
I tested this on an iPhone 6, which can record slow-motion video. I have read the official Apple documentation at https://developer.apple.com/library/ios/documentation/AudioVideo/Conceptual/AVFoundationPG/Articles/04_MediaCapture.html. Any clues?
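As an aside, wall-clock prints include scheduling jitter, so a tighter way to measure the delivered rate is to diff the sample buffers' presentation timestamps. A minimal sketch of such a check (the lastTimestamp property and measureRate name are mine, not part of the original code):
// Sketch: measure delivered fps from the buffers' presentation timestamps.
// `lastTimestamp` would be an extra stored property on the view controller.
var lastTimestamp: Double? = nil

func measureRate(sampleBuffer: CMSampleBuffer) {
    let seconds = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer))
    if let last = lastTimestamp {
        let delta = seconds - last
        // at 120 fps this should print ~8.3 ms; ~33 ms indicates 30 fps
        print(String(format: "%.1f ms -> %.1f fps", delta * 1000.0, 1.0 / delta))
    }
    lastTimestamp = seconds
}
Calling measureRate(sampleBuffer) from the delegate callback would take the print/scheduling jitter out of the measurement.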
import UIKit
import AVFoundation

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate
{
    var captureDevice: AVCaptureDevice?
    let captureSession = AVCaptureSession()
    let videoCaptureOutput = AVCaptureVideoDataOutput()
    var startTime = NSDate.timeIntervalSinceReferenceDate()

    // press button to start the video session
    @IBAction func startPressed() {
        if captureSession.inputs.count > 0 && captureSession.outputs.count > 0 {
            startTime = NSDate.timeIntervalSinceReferenceDate()
            captureSession.startRunning()
        }
    }
    override func viewDidLoad() {
        super.viewDidLoad()

        // set capture session resolution
        captureSession.sessionPreset = AVCaptureSessionPresetLow

        // find a back camera with a format that supports >= 120 fps
        let devices = AVCaptureDevice.devices()
        var avFormat: AVCaptureDeviceFormat? = nil
        for device in devices {
            if device.hasMediaType(AVMediaTypeVideo) && device.position == AVCaptureDevicePosition.Back {
                for vFormat in device.formats {
                    let ranges = vFormat.videoSupportedFrameRateRanges as! [AVFrameRateRange]
                    let filtered: [Double] = ranges.map({ $0.maxFrameRate }).filter({ $0 >= 119.0 })
                    if !filtered.isEmpty {
                        // found a good device with a good format!
                        captureDevice = device as? AVCaptureDevice
                        avFormat = vFormat as? AVCaptureDeviceFormat
                    }
                }
            }
        }
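        // (Editor's sketch, not in the original post) Confirm that a
        // high-speed format was actually found before configuring the device.
        if let f = avFormat {
            for r in f.videoSupportedFrameRateRanges as! [AVFrameRateRange] {
                print("selected format supports \(r.minFrameRate)-\(r.maxFrameRate) fps")
            }
        } else {
            print("no >= 120 fps back-camera format found")
        }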
        // use the found capture device and format to set things up
        if let dv = captureDevice {
            // configure the 120 fps format (the configuration lock must be
            // held while activeFormat and the frame durations are set)
            do {
                try dv.lockForConfiguration()
                dv.activeFormat = avFormat
                dv.activeVideoMinFrameDuration = CMTimeMake(1, 120)
                dv.activeVideoMaxFrameDuration = CMTimeMake(1, 120)
                dv.unlockForConfiguration()
            } catch {
                print("failed locking device")
            }
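            // (Editor's sketch, not in the original post) Read the value back
            // right away; if this already reports 1/30, the format was
            // rejected before the session even started.
            let d = dv.activeVideoMinFrameDuration
            print("min frame duration after configuration: \(d.value)/\(d.timescale)")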
            // input -> session
            do {
                let input = try AVCaptureDeviceInput(device: dv)
                if captureSession.canAddInput(input) {
                    captureSession.addInput(input)
                }
            } catch {
                print("failed adding capture device as input to capture session")
            }
        }
        // output -> session
        let videoQueue = dispatch_queue_create("videoQueue", DISPATCH_QUEUE_SERIAL)
        videoCaptureOutput.setSampleBufferDelegate(self, queue: videoQueue)
        videoCaptureOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey: Int(kCVPixelFormatType_32BGRA)]
        videoCaptureOutput.alwaysDiscardsLateVideoFrames = true
        if captureSession.canAddOutput(videoCaptureOutput) {
            captureSession.addOutput(videoCaptureOutput)
        }
    }
    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!)
    {
        print("\(NSDate.timeIntervalSinceReferenceDate() - startTime)")
        // More pixel/frame processing here
    }
}
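One diagnostic worth adding (my suggestion, not part of the original post): read the device's configuration back after startRunning(), since a manually chosen activeFormat may be overridden when the input is attached to a session that uses a fixed preset such as AVCaptureSessionPresetLow. A minimal sketch, assuming access to the same captureDevice:
// Sketch: dump the device's live configuration once the session is running.
func dumpFrameRate(device: AVCaptureDevice) {
    let minDur = device.activeVideoMinFrameDuration
    let maxDur = device.activeVideoMaxFrameDuration
    print("activeFormat: \(device.activeFormat)")
    print("min frame duration: \(minDur.value)/\(minDur.timescale)")   // expect 1/120
    print("max frame duration: \(maxDur.value)/\(maxDur.timescale)")   // expect 1/120
}
Calling dumpFrameRate(dv) right after captureSession.startRunning() in startPressed() would show whether the 1/120 frame duration survived session startup.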