背景
我正在使用 AVCaptureSession 和 AVCapturePhotoOutput 将捕获结果保存为 JPEG 图像。
// Coordinates data flow from the camera input to the photo output.
let captureSession = AVCaptureSession()
// Modern photo-capture output (successor to AVCaptureStillImageOutput).
let stillImageOutput = AVCapturePhotoOutput()
// Back wide-angle camera; remains nil until setupCamera() assigns it.
var captureDevice : AVCaptureDevice?
...
/// Selects the back wide-angle camera and wires it into the capture session.
/// Fixes in this version: the original called `try` inside a non-throwing
/// function without a `do/catch` (a compile error), force-unwrapped
/// `captureDevice`, and never checked `canAddInput`.
func setupCamera() {
    // May be nil (e.g. on the simulator or if the camera is unavailable).
    captureDevice = AVCaptureDevice.default(AVCaptureDevice.DeviceType.builtInWideAngleCamera, for: AVMediaType.video, position: .back)
    guard let device = captureDevice else { return }
    do {
        // Creating the input can throw (e.g. camera permission denied).
        let input = try AVCaptureDeviceInput(device: device)
        if captureSession.canAddInput(input) {
            captureSession.addInput(input)
        }
        if captureSession.canAddOutput(stillImageOutput) {
            captureSession.addOutput(stillImageOutput)
        }
    } catch {
        // Errors handled here...
    }
}
AVCaptureDevice 被设置为连续自动曝光模式（continuousAutoExposure）：
/// Puts the camera into continuous auto-exposure mode.
/// Fixes in this version: verify the mode is supported before assigning it
/// (assigning an unsupported mode raises an exception), and guarantee the
/// configuration lock is released on every exit path via `defer`.
func configureCamera() {
    guard let device = captureDevice else { return }
    do {
        try device.lockForConfiguration()
        // Always release the lock, even if an error is thrown later.
        defer { device.unlockForConfiguration() }
        if device.isExposureModeSupported(.continuousAutoExposure) {
            device.exposureMode = .continuousAutoExposure
        }
    } catch {
        // Errors handled here...
    }
}
捕获是由下面的函数开始的：
/// Kicks off a single photo capture; the result is delivered to the
/// AVCapturePhotoCaptureDelegate methods on this object.
func capture() {
    // Per-capture settings: stabilization on, flash off.
    let settings = AVCapturePhotoSettings()
    settings.isAutoStillImageStabilizationEnabled = true
    settings.flashMode = .off
    // Hand the settings to the output; `self` receives the delegate callbacks.
    stillImageOutput.capturePhoto(with: settings, delegate: self)
}
让该类遵循 AVCapturePhotoCaptureDelegate 协议（这是协议遵循，而不是父类），并通过它实现 photoOutput 回调：
// MARK: - AVCapturePhotoCaptureDelegate
/// Called by AVFoundation when a capture finishes. Converts the JPEG sample
/// buffer into a UIImage and hands it off for saving.
/// NOTE(review): this is the pre-iOS-11 delegate form; the sample-buffer
/// variants were later superseded by `photoOutput(_:didFinishProcessingPhoto:error:)`.
func photoOutput(_ captureOutput: AVCapturePhotoOutput,
                 didFinishProcessingPhoto photoSampleBuffer: CMSampleBuffer?,
                 previewPhoto previewPhotoSampleBuffer: CMSampleBuffer?,
                 resolvedSettings: AVCaptureResolvedPhotoSettings,
                 bracketSettings: AVCaptureBracketedStillImageSettings?,
                 error: Error?) {
    // A capture error or a missing sample buffer means nothing to save.
    guard error == nil, let sampleBuffer = photoSampleBuffer else {
        // Errors handled here
        return
    }
    // Encode the sample buffer (plus optional preview) as JPEG data.
    guard let jpegData = AVCapturePhotoOutput.jpegPhotoDataRepresentation(
        forJPEGSampleBuffer: sampleBuffer,
        previewPhotoSampleBuffer: previewPhotoSampleBuffer
    ) else {
        return
    }
    // Wrap the data in a UIImage at native (1.0) scale.
    if let image = UIImage(data: jpegData, scale: 1.0) {
        // save photo ...
        _ = image
    }
}
这样，一切都应该正常运行，但是……
问题
我需要知道用于每个捕获的曝光持续时间和ISO值。值之所以有所不同,是因为摄像机设置为自动调整曝光,并且必须这样。
我知道捕获的元数据具有这些值,但我不知道如何访问它们。
曝光持续时间和 ISO 值对于微调曝光以获得最佳结果是必需的。微调后，使用这些手动曝光值开始捕获：
// Switch to fully manual exposure using the fine-tuned duration/ISO.
// NOTE(review): values presumably must fall within the active format's
// supported exposure/ISO ranges — confirm before shipping.
captureDevice?.setExposureModeCustom(duration: customTime, iso: customISO, completionHandler: nil)
解决方法：不从捕获元数据中获取所用的 ISO 和曝光持续时间，而是在拍照之前直接读取这些值。这样做时，必须先确认自动曝光已经调整完毕。
在调用捕获之前:
检查自动曝光未调整
// Busy-wait until auto-exposure has settled (isAdjustingExposure == false).
// Fix: comparing the optional with `== true` removes the force-unwrap, which
// crashed whenever captureDevice was nil (the loop now simply exits instead).
// NOTE(review): polling blocks the current thread — key-value observing
// `isAdjustingExposure` would be preferable in production.
while captureDevice?.isAdjustingExposure == true {
    usleep(100000) // wait 100 msec
}
阅读当前的曝光参数
// Snapshot the exposure values the auto-exposure algorithm settled on.
// NOTE(review): the force-unwraps crash if captureDevice is nil — ensure
// setupCamera() succeeded before reaching this point.
let current_exposure_duration : CMTime = (captureDevice?.exposureDuration)!
let current_exposure_ISO : Float = (captureDevice?.iso)!
然后拍照
// Trigger the capture now that the settled exposure values have been recorded.
stillImageOutput.capturePhoto(with: photoSettings, delegate: self)