我正在编写一个用于长时间曝光图像拍摄的应用程序。
我用了func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!)
获取 CMSampleBuffer,并对其应用了使用 CILightenBlendMode 的 CIFilter。
问题是,混合需要太长时间,会导致帧丢失。我尝试复制缓冲区:
var copiedBuffer:CMSampleBuffer?
CMSampleBufferCreateCopy(nil, sampleBuffer, &copiedBuffer)
blendImages(copiedBuffer!)
但这并没有帮助,帧仍然下降。
完整代码:
/// Capture-delegate callback: while a long-exposure session is running,
/// clones each incoming frame buffer and hands the clone off for blending.
func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
    guard CameraService.longExposureRunning else { return }
    // Copy the sample buffer so the capture pipeline can recycle the
    // original immediately instead of waiting on the blend.
    var bufferClone: CMSampleBuffer?
    CMSampleBufferCreateCopy(nil, sampleBuffer, &bufferClone)
    blendImages(bufferClone!)
}
/// Capture-delegate callback invoked when the pipeline drops a frame
/// (e.g. because `didOutputSampleBuffer` held its buffer for too long).
/// Logs the drop so frame loss is visible during debugging.
func captureOutput(captureOutput: AVCaptureOutput!, didDropSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
print("Dropped")
}
/// Blends the incoming frame into `lastImage` using CILightenBlendMode and
/// pushes the running composite to `imageView` on the main queue.
///
/// NOTE(review): `lastImage` here accumulates an ever-growing, never-rendered
/// CIFilter graph — nothing forces Core Image to render until display time,
/// which is the likely cause of the dropped frames described above.
func blendImages(buffer:CMSampleBuffer){
    let priority = DISPATCH_QUEUE_PRIORITY_DEFAULT
    dispatch_async(dispatch_get_global_queue(priority, 0)){
        let pixelBuffer = CMSampleBufferGetImageBuffer(buffer)
        let cameraImage = CIImage(CVPixelBuffer: pixelBuffer!)
        if let backgroundImage = self.lastImage{
            let blendEffect = CIFilter(name: "CILightenBlendMode")
            blendEffect?.setValue(backgroundImage, forKey: kCIInputBackgroundImageKey)
            blendEffect?.setValue(cameraImage, forKey: kCIInputImageKey)
            self.lastImage = blendEffect?.outputImage
            print("Blending")
        }else{
            // First frame: nothing to blend against yet.
            self.lastImage = cameraImage
        }
        let filteredImage = UIImage(CIImage: self.lastImage!)
        dispatch_async(dispatch_get_main_queue())
        {
            // Fix: `self.` is required to access a property inside an
            // escaping closure — the original `imageView.image = ...`
            // does not compile.
            self.imageView.image = filteredImage
        }
    }
}
我怀疑 Core Image 正在把所有帧串接成一个巨大的滤镜内核。你可能会发现 CIImageAccumulator 有帮助;不过我是通过强制 Core Image 在每一帧都渲染整个滤镜链、然后重新开始,来让你的代码正常工作的。
我已将你的 lastImage 变量的类型改为可选的 UIImage,并添加了一个名为 context 的常量(类型为 CIContext)。做了这些更改后,代码就能正常工作了:
使用 let context:CIContext = CIContext(options: [kCIContextUseSoftwareRenderer:false]) 可以启用 GPU 渲染而不是 CPU 渲染。
/// Blends each frame into `lastImage`, forcing Core Image to render after
/// every blend via `context.createCGImage` so the filter graph stays bounded
/// and frames are no longer dropped.
///
/// Improvements over the posted version: the force-unwraps of the pixel
/// buffer, the CIFilter, and its output image are replaced with guarded
/// early returns (a bad frame is skipped instead of crashing), and the
/// duplicated render/convert code is hoisted out of both branches.
func blendImages(buffer:CMSampleBuffer){
    let priority = DISPATCH_QUEUE_PRIORITY_DEFAULT
    dispatch_async(dispatch_get_global_queue(priority, 0)){
        // Skip the frame rather than crash if it carries no pixel data.
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(buffer) else { return }
        let cameraImage = CIImage(CVPixelBuffer: pixelBuffer)

        // Choose what to render: the blend of (lastImage, new frame),
        // or the bare frame when this is the first one.
        let imageToRender: CIImage
        if let backgroundImage = self.lastImage,
           blendEffect = CIFilter(name: "CILightenBlendMode") {
            blendEffect.setValue(
                CIImage(image: backgroundImage),
                forKey: kCIInputBackgroundImageKey)
            blendEffect.setValue(
                cameraImage, forKey:
                kCIInputImageKey)
            guard let blended = blendEffect.outputImage else { return }
            imageToRender = blended
            print("Blending")
        }else{
            imageToRender = cameraImage
        }

        // Render NOW so the CIFilter chain restarts from a flat bitmap on
        // every frame instead of growing without bound.
        let imageRef = self.context.createCGImage(
            imageToRender,
            fromRect: imageToRender.extent)
        let filteredImage = UIImage(CGImage: imageRef)
        self.lastImage = filteredImage

        dispatch_async(dispatch_get_main_queue())
        {
            self.imageView.image = filteredImage
        }
    }
}
时髦的效果!
西蒙
我能想到的最明显的事情是检查何时设置输出。
确保在 AVAssetWriterInput 上将 expectsMediaDataInRealTime 设置为 true。
https://developer.apple.com/library/tvos/documentation/AVFoundation/Reference/AVAssetWriterInput_Class/index.html#//apple_ref/occ/instp/AVAssetWriterInput/expectsMediaDataInRealTime