Why is the video output always 30 fps despite a different setting?



I configured the back camera for 120 frames per second. However, when I inspect the sample output in captureOutput() by printing the time at which that callback fires (see below), the interval between calls is roughly 33 ms (30 fps). Whatever frame rate I set through activeVideoMinFrameDuration and activeVideoMaxFrameDuration, the rate observed in captureOutput() is always 30 fps.

I have tested this on an iPhone 6, which is capable of slo-mo video. I have also read the official Apple documentation at https://developer.apple.com/library/ios/documentation/AudioVideo/Conceptual/AVFoundationPG/Articles/04_MediaCapture.html. Any clues?

import UIKit
import AVFoundation

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate
{
    var captureDevice: AVCaptureDevice?
    let captureSession = AVCaptureSession()
    let videoCaptureOutput = AVCaptureVideoDataOutput()
    var startTime = NSDate.timeIntervalSinceReferenceDate()
    // press button to start the video session
    @IBAction func startPressed() {
        if captureSession.inputs.count > 0 && captureSession.outputs.count > 0 {
            startTime = NSDate.timeIntervalSinceReferenceDate()
            captureSession.startRunning()
        }
    }
    override func viewDidLoad() {
        super.viewDidLoad()
        // set capture session resolution
        captureSession.sessionPreset = AVCaptureSessionPresetLow
        let devices = AVCaptureDevice.devices()
        var avFormat: AVCaptureDeviceFormat? = nil
        for device in devices {
            if (device.hasMediaType(AVMediaTypeVideo)) {
                if (device.position == AVCaptureDevicePosition.Back) {
                    for vFormat in device.formats {
                        let ranges = vFormat.videoSupportedFrameRateRanges as! [AVFrameRateRange]
                        let filtered: Array<Double> = ranges.map({ $0.maxFrameRate } ).filter( {$0 >= 119.0} )
                        if !filtered.isEmpty {
                            // found a good device with good format!
                            captureDevice = device as? AVCaptureDevice
                            avFormat = vFormat as? AVCaptureDeviceFormat
                        }
                    }
                }
            }
        }
        // use the found capture device and format to set things up
        if let dv = captureDevice {
            // configure
            do {
                try dv.lockForConfiguration()
            } catch _ {
                print("failed locking device")
            }
            dv.activeFormat = avFormat
            dv.activeVideoMinFrameDuration = CMTimeMake(1, 120)
            dv.activeVideoMaxFrameDuration = CMTimeMake(1, 120)
            dv.unlockForConfiguration()
            // input -> session
            do {
                let input = try AVCaptureDeviceInput(device: dv)
                if captureSession.canAddInput(input) {
                    captureSession.addInput(input)
                }
            } catch _ {
                print("failed adding capture device as input to capture session")
            }
        }
        // output -> session
        let videoQueue = dispatch_queue_create("videoQueue", DISPATCH_QUEUE_SERIAL)
        videoCaptureOutput.setSampleBufferDelegate(self, queue: videoQueue)
        videoCaptureOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey: Int(kCVPixelFormatType_32BGRA)]
        videoCaptureOutput.alwaysDiscardsLateVideoFrames = true
        if captureSession.canAddOutput(videoCaptureOutput) {
            captureSession.addOutput(videoCaptureOutput)
        }
    }
    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!)
    {
        // print seconds elapsed since startRunning for each delivered frame
        print( "\(NSDate.timeIntervalSinceReferenceDate() - startTime)" )
        // More pixel/frame processing here
    }
}
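
As a side note on the measurement: wall-clock prints include dispatch jitter, so another way to check the delivered rate is to diff the presentation timestamps of consecutive sample buffers. Below is a minimal sketch of such a delegate body, assuming a hypothetical lastTimestamp property added to the class (not part of the original code):

var lastTimestamp: Double = -1   // hypothetical property, not in the original code

func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!)
{
    // presentation timestamp of this frame, in seconds
    let seconds = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer))
    if lastTimestamp >= 0 {
        // ~0.0083 s per frame corresponds to 120 fps, ~0.033 s to 30 fps
        let delta = seconds - lastTimestamp
        print("frame interval: \(delta) s (~\(1.0 / delta) fps)")
    }
    lastTimestamp = seconds
}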

Found the answer: swap the order of the two blocks, "configure" and "input -> session".
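
For reference, here is a minimal sketch of the reordered device blocks inside viewDidLoad (same Swift 2 style as the question; captureDevice and avFormat are the values found in the format-search loop above). Presumably, adding the input while a fixed sessionPreset is in effect resets the device's activeFormat, so the input has to be attached before the format and frame durations are applied:

if let dv = captureDevice, fmt = avFormat {
    // input -> session (add the input first)
    do {
        let input = try AVCaptureDeviceInput(device: dv)
        if captureSession.canAddInput(input) {
            captureSession.addInput(input)
        }
    } catch _ {
        print("failed adding capture device as input to capture session")
    }
    // configure (then lock the device and apply the 120 fps format)
    do {
        try dv.lockForConfiguration()
        dv.activeFormat = fmt
        dv.activeVideoMinFrameDuration = CMTimeMake(1, 120)
        dv.activeVideoMaxFrameDuration = CMTimeMake(1, 120)
        dv.unlockForConfiguration()
    } catch _ {
        print("failed locking device")
    }
}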
