I am doing a livestream where I need to send video from the camera plus an overlay rendered from a UIView. It works at first, but the FPS drops after a few seconds. If I send the CMSampleBuffer directly, the FPS is fine; but if I convert the buffer to a UIImage and composite the overlay onto it, the FPS degrades after a few seconds.
I am attaching the code below, where I get the CMSampleBuffer from captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection).
I am also attaching the FPS graph from the Facebook livestream SDK.
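For reference, a quick way to see which stage eats the frame budget is to time each step inside captureOutput. This is only a minimal sketch (it assumes QuartzCore is imported for CACurrentMediaTime, and the print is illustrative):

let t0 = CACurrentMediaTime()
let image = self.imageFromSampleBuffer(sampleBuffer: sampleBuffer)
let t1 = CACurrentMediaTime()
// Log how long the buffer-to-UIImage conversion took, in milliseconds.
print(String(format: "buffer -> UIImage: %.2f ms", (t1 - t0) * 1000))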
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    let pts = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
    // Shift the presentation timestamp forward by 5 seconds.
    let newPts = CMTimeMakeWithSeconds(CMTimeGetSeconds(pts) + 5, preferredTimescale: pts.timescale)
    // Convert the camera frame to a UIImage (this allocates every frame).
    let image = self.imageFromSampleBuffer(sampleBuffer: sampleBuffer)
    // Letterbox wide windows; otherwise draw into the full window.
    let windowSize = self.window.bounds.size
    let drawableRect = windowSize.width > 800
        ? CGRect(x: 73, y: 0, width: windowSize.width - 147, height: windowSize.height)
        : CGRect(origin: .zero, size: windowSize)
    // Snapshot the overlay web view.
    let webViewImage: UIImage = self.webview.screenShotWithoutDrawHierarchy(drawableRect: drawableRect) ?? UIImage()
    // Draw the overlay on top of the camera frame.
    let compositeImage = self.composite(image: image!, overlay: webViewImage, drawableRect: drawableRect)
    if #available(iOS 13, *) {
        if let newSampleBuffer = compositeImage?.createCMSampleBuffer(presentationTimeStamp: newPts,
                                                                      duration: .invalid,
                                                                      decodeTimeStamp: sampleBuffer.decodeTimeStamp) {
            self.rtmpStream.appendSampleBuffer(newSampleBuffer, withType: .video)
        }
    }
}
func composite(image: UIImage, overlay: UIImage, scaleOverlay: Bool = false, drawableRect: CGRect) -> UIImage? {
    // Draw into a zero-origin rect: the context is only drawableRect.size large,
    // so drawing at drawableRect's original origin would shift the content.
    let targetRect = CGRect(origin: .zero, size: drawableRect.size)
    UIGraphicsBeginImageContext(drawableRect.size)
    defer { UIGraphicsEndImageContext() }
    image.draw(in: targetRect)
    overlay.draw(in: targetRect)
    return UIGraphicsGetImageFromCurrentImageContext()
}
private func imageFromSampleBuffer(sampleBuffer: CMSampleBuffer) -> UIImage? {
    guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return nil }
    let ciImage = CIImage(cvPixelBuffer: imageBuffer)
    return self.convert(ciImage: ciImage)
}

// A CIContext is expensive to build; create it once and reuse it for every
// frame instead of allocating a new one per conversion.
private static let ciContext = CIContext(options: nil)

// Convert CIImage to UIImage
func convert(ciImage: CIImage) -> UIImage? {
    guard let cgImage = Self.ciContext.createCGImage(ciImage, from: ciImage.extent) else { return nil }
    return UIImage(cgImage: cgImage)
}
// Clean up AVCapture.
func stopCamera() {
    session.stopRunning()
}
}
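For comparison, a direction that avoids the UIImage round trip entirely: keep one reused CIContext and composite the overlay as a CIImage directly over the camera pixel buffer. This is only a sketch, not my working code; sharedCIContext is an assumed name, the overlay would be re-rendered only when the web view changes, and it presumes the capture buffer is writable:

import CoreImage
import CoreVideo

// Created once and reused; building a CIContext per frame is a common FPS killer.
let sharedCIContext = CIContext(options: nil)

// Composites `overlay` over the camera frame and renders the result back into
// the same pixel buffer, skipping the UIImage/CGContext round trip.
// Sketch only: assumes the overlay extent matches the buffer dimensions.
func composite(overlay: CIImage, onto pixelBuffer: CVPixelBuffer) {
    let background = CIImage(cvPixelBuffer: pixelBuffer)
    let composited = overlay.composited(over: background)
    sharedCIContext.render(composited, to: pixelBuffer)
}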
extension UIImage {
    var cvPixelBuffer: CVPixelBuffer? {
        let attrs = [
            String(kCVPixelBufferCGImageCompatibilityKey): kCFBooleanTrue,
            String(kCVPixelBufferCGBitmapContextCompatibilityKey): kCFBooleanTrue
        ] as [String: Any]
        var buffer: CVPixelBuffer?
        let window = UIApplication.shared.keyWindow!
        let windowSize = window.bounds.size
        // Same letterboxing rule as in captureOutput.
        let drawableRect = windowSize.width > 800
            ? CGRect(x: 73, y: 0, width: windowSize.width - 147, height: windowSize.height)
            : CGRect(origin: .zero, size: windowSize)
        // A brand-new pixel buffer is allocated on every call; see the pool sketch below.
        let status = CVPixelBufferCreate(kCFAllocatorDefault,
                                         Int(drawableRect.size.width),
                                         Int(drawableRect.size.height),
                                         kCVPixelFormatType_32ARGB,
                                         attrs as CFDictionary,
                                         &buffer)
        guard status == kCVReturnSuccess, let buffer = buffer else { return nil }
        CVPixelBufferLockBaseAddress(buffer, CVPixelBufferLockFlags(rawValue: 0))
        defer { CVPixelBufferUnlockBaseAddress(buffer, CVPixelBufferLockFlags(rawValue: 0)) }
        let pixelData = CVPixelBufferGetBaseAddress(buffer)
        let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
        // The bitmap context must match the pixel buffer's dimensions
        // (drawableRect, not the window), or the rows will be misaligned.
        guard let context = CGContext(data: pixelData,
                                      width: Int(drawableRect.size.width),
                                      height: Int(drawableRect.size.height),
                                      bitsPerComponent: 8,
                                      bytesPerRow: CVPixelBufferGetBytesPerRow(buffer),
                                      space: rgbColorSpace,
                                      bitmapInfo: CGImageAlphaInfo.noneSkipFirst.rawValue) else { return nil }
        // Flip the coordinate system so UIKit drawing lands right side up.
        context.translateBy(x: 0, y: drawableRect.size.height)
        context.scaleBy(x: 1.0, y: -1.0)
        let newRect = CGRect(origin: .zero, size: drawableRect.size)
        UIGraphicsPushContext(context)
        UIColor.clear.setFill()
        UIRectFill(newRect)
        self.draw(in: newRect)
        UIGraphicsPopContext()
        return buffer
    }
    func createCMSampleBuffer(presentationTimeStamp: CMTime, duration: CMTime, decodeTimeStamp: CMTime) -> CMSampleBuffer? {
        guard let pixelBuffer = cvPixelBuffer else { return nil }
        var newSampleBuffer: CMSampleBuffer?
        var info = CMSampleTimingInfo()
        var videoInfo: CMVideoFormatDescription?
        info.presentationTimeStamp = presentationTimeStamp
        info.duration = duration
        // Note: the decodeTimeStamp parameter is ignored here; DTS is left invalid.
        info.decodeTimeStamp = CMTime.invalid
        CMVideoFormatDescriptionCreateForImageBuffer(allocator: nil,
                                                     imageBuffer: pixelBuffer,
                                                     formatDescriptionOut: &videoInfo)
        guard let formatDescription = videoInfo else { return nil }
        CMSampleBufferCreateForImageBuffer(allocator: kCFAllocatorDefault,
                                           imageBuffer: pixelBuffer,
                                           dataReady: true,
                                           makeDataReadyCallback: nil,
                                           refcon: nil,
                                           formatDescription: formatDescription,
                                           sampleTiming: &info,
                                           sampleBufferOut: &newSampleBuffer)
        return newSampleBuffer
    }
}
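I also suspect the per-frame CVPixelBufferCreate call: allocating a fresh buffer for every frame adds allocator pressure that can show up as a gradual FPS drop. A pool-based variant I am considering (a sketch only; the class name and dimensions are illustrative):

import CoreVideo

// Sketch: hand out pixel buffers from a reusable pool instead of creating
// a fresh one per frame. Width/height/format are illustrative values.
final class PixelBufferPool {
    private var pool: CVPixelBufferPool?

    init?(width: Int, height: Int) {
        let attrs: [String: Any] = [
            kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32ARGB,
            kCVPixelBufferWidthKey as String: width,
            kCVPixelBufferHeightKey as String: height,
            kCVPixelBufferCGImageCompatibilityKey as String: true,
            kCVPixelBufferCGBitmapContextCompatibilityKey as String: true
        ]
        guard CVPixelBufferPoolCreate(kCFAllocatorDefault, nil,
                                      attrs as CFDictionary, &pool) == kCVReturnSuccess else {
            return nil
        }
    }

    // Returns a recycled (or newly allocated) buffer from the pool.
    func dequeue() -> CVPixelBuffer? {
        guard let pool = pool else { return nil }
        var buffer: CVPixelBuffer?
        CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, pool, &buffer)
        return buffer
    }
}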
Note: I am using HaishinKit, but with my own custom camera preview layer rather than attaching the camera to MTHKView, because driving both the view and the camera through HaishinKit was producing heat warnings.