I am writing an application for long-exposure image capture.
I used func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!)
to get a CMSampleBuffer
for applying a CIFilter
using CILightenBlendMode.
The problem is that blending takes too long and causes frames to drop. I tried to copy the buffer:
var copiedBuffer:CMSampleBuffer?
CMSampleBufferCreateCopy(nil, sampleBuffer, &copiedBuffer)
blendImages(copiedBuffer!)
But that didn't help; frames still drop.
Complete Code:
/// AVCaptureVideoDataOutputSampleBufferDelegate callback: receives each camera frame.
/// Copies the sample buffer so the driver's buffer can return to the capture pool
/// quickly, then hands the copy to the asynchronous blend pipeline.
func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
    if CameraService.longExposureRunning {
        var copiedBuffer: CMSampleBuffer?
        // Bug fix: the original force-unwrapped `copiedBuffer!` without checking
        // the OSStatus — if the copy fails (e.g. under memory pressure) that
        // crashes. Check the status and bind the optional instead.
        let status = CMSampleBufferCreateCopy(nil, sampleBuffer, &copiedBuffer)
        if let buffer = copiedBuffer where status == noErr {
            blendImages(buffer)
        }
        // On failure the frame is simply skipped; the next frame continues the blend.
    }
}
/// Delegate callback invoked whenever the capture pipeline drops a frame
/// (e.g. because the delegate queue is still busy with the previous frame).
func captureOutput(captureOutput: AVCaptureOutput!, didDropSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
    // Log so dropped frames are visible while debugging performance.
    let droppedMessage = "Dropped"
    print(droppedMessage)
}
/// Blends the incoming frame into the running long-exposure composite
/// (`self.lastImage`) off the main thread using CILightenBlendMode, then
/// pushes the result to the preview image view on the main queue.
///
/// - Parameter buffer: A copied sample buffer owned by the caller; its pixel
///   buffer is read on a background queue.
///
/// NOTE(review): this dispatches to a *concurrent* global queue, so two frames
/// can blend simultaneously and race on `self.lastImage` (frames may also be
/// composited out of order). A dedicated serial queue stored on the class
/// would guarantee ordering — confirm and fix at the class level.
/// NOTE(review): each frame appends another CIFilter to the lazy CIImage
/// graph, so the graph grows without bound; rendering the composite to a
/// concrete CGImage/CVPixelBuffer each frame would cap the work per frame and
/// is the likely cure for the dropped frames.
func blendImages(buffer: CMSampleBuffer) {
    let priority = DISPATCH_QUEUE_PRIORITY_DEFAULT
    dispatch_async(dispatch_get_global_queue(priority, 0)) {
        // Bug fix: CMSampleBufferGetImageBuffer can return nil (non-video
        // buffers); the original force-unwrapped it and would crash.
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(buffer) else { return }
        let cameraImage = CIImage(CVPixelBuffer: pixelBuffer)
        if let backgroundImage = self.lastImage {
            let blendEffect = CIFilter(name: "CILightenBlendMode")
            blendEffect?.setValue(backgroundImage, forKey: kCIInputBackgroundImageKey)
            blendEffect?.setValue(cameraImage, forKey: kCIInputImageKey)
            self.lastImage = blendEffect?.outputImage
            print("Blending")
        } else {
            // First frame: seed the composite with the raw camera image.
            self.lastImage = cameraImage
        }
        // Bug fix: the original force-unwrapped `self.lastImage!`; bind it
        // instead (it can be nil if the filter lookup/output failed).
        guard let composite = self.lastImage else { return }
        let filteredImage = UIImage(CIImage: composite)
        dispatch_async(dispatch_get_main_queue()) {
            // Bug fix: the original wrote `imageView.image` without `self.`,
            // which does not compile inside an escaping closure.
            self.imageView.image = filteredImage
        }
    }
}