
I am currently developing a module in which I need to create a video from an array of CGImage. While doing that processing, my application crashes at some point, and I am not able to figure out the exact reason behind the crash.

Can anyone please tell me whether I am going in the right direction or not? Should I convert [CGImage] to a video, or do I need to choose another approach?

I also tried converting the CGImage to UIImage and creating the video from that, but I am still facing the same issue.

I am getting the image data as [UInt8], so what would be the correct approach for converting the image format and creating the video?

To create the video from [CGImage], I am following the approach below. I convert the [UInt8] data to a CGImage using CGDataProvider and convert the CGImage to a UIImage. I have an array of images, so I collect the UIImages, then merge them and create the video.

Here is my code to create a CGImage from the data.

private(set) var data: [UInt8]

var cgImage: CGImage? {
    let colorSpaceRef = CGColorSpaceCreateDeviceRGB()

    let bitsPerComponent = 8
    let bitsPerPixel = channels * bitsPerComponent
    let bytesPerRow = channels * width
    let totalBytes = height * bytesPerRow
    let bitmapInfo = CGBitmapInfo(rawValue: channels == 3 ? CGImageAlphaInfo.none.rawValue : CGImageAlphaInfo.last.rawValue)
    let provider = CGDataProvider(dataInfo: nil,
                                  data: data,
                                  size: totalBytes,
                                  releaseData: { _, _, _ in })!

    return CGImage(width: width,
                   height: height,
                   bitsPerComponent: bitsPerComponent,
                   bitsPerPixel: bitsPerPixel,
                   bytesPerRow: bytesPerRow,
                   space: colorSpaceRef,
                   bitmapInfo: bitmapInfo,
                   provider: provider,
                   decode: nil,
                   shouldInterpolate: false,
                   intent: .perceptual)
}
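
One thing I am not sure about: CGDataProvider(dataInfo:data:size:releaseData:) reads directly from the pointer it is given rather than copying the bytes, and passing the Swift [UInt8] array there only guarantees the pointer for the duration of that call. Would switching to a provider that owns its own copy of the data be the right direction? A minimal sketch of what I mean (it uses the same width, height and channels properties as above; the property name is only for illustration):

var cgImageCopyingBytes: CGImage? {
    let bitsPerComponent = 8
    let bitsPerPixel = channels * bitsPerComponent
    let bytesPerRow = channels * width
    let bitmapInfo = CGBitmapInfo(rawValue: channels == 3 ? CGImageAlphaInfo.none.rawValue : CGImageAlphaInfo.last.rawValue)

    // Data(data) makes an owned copy of the [UInt8] buffer, so the provider
    // no longer depends on a pointer that is only valid during the init call.
    guard let provider = CGDataProvider(data: Data(data) as CFData) else { return nil }

    return CGImage(width: width,
                   height: height,
                   bitsPerComponent: bitsPerComponent,
                   bitsPerPixel: bitsPerPixel,
                   bytesPerRow: bytesPerRow,
                   space: CGColorSpaceCreateDeviceRGB(),
                   bitmapInfo: bitmapInfo,
                   provider: provider,
                   decode: nil,
                   shouldInterpolate: false,
                   intent: .perceptual)
}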

My app crashes here, in this function, when I start drawing images into the context repeatedly:

context!.draw(cgImage, in: CGRect(x: 0, y: 0, width: frameWidth, height: frameHeight))

If I use images from the bundle and create the video with this code, it works fine. When I use a CGImage created from the [UInt8] data, it starts crashing after writing 3-4 images.

func newPixelBufferFrom(cgImage: CGImage) -> CVPixelBuffer? {
    autoreleasepool {
        let options: [String: Any] = [kCVPixelBufferCGImageCompatibilityKey as String: true, kCVPixelBufferCGBitmapContextCompatibilityKey as String: true]
        var pxbuffer: CVPixelBuffer?
        let frameWidth = self.videoSettings[AVVideoWidthKey] as! Int
        let frameHeight = self.videoSettings[AVVideoHeightKey] as! Int
        let status = CVPixelBufferCreate(kCFAllocatorDefault, frameWidth, frameHeight, kCVPixelFormatType_32ARGB, options as CFDictionary?, &pxbuffer)
        assert(status == kCVReturnSuccess && pxbuffer != nil, "newPixelBuffer failed")

        CVPixelBufferLockBaseAddress(pxbuffer!, CVPixelBufferLockFlags(rawValue: 0))
        let pxdata = CVPixelBufferGetBaseAddress(pxbuffer!)
        let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
        let context = CGContext(data: pxdata, width: frameWidth, height: frameHeight, bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(pxbuffer!), space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.noneSkipFirst.rawValue)
        assert(context != nil, "context is nil")
        context!.concatenate(CGAffineTransform.identity)
        context!.draw(cgImage, in: CGRect(x: 0, y: 0, width: frameWidth, height: frameHeight))
        CVPixelBufferUnlockBaseAddress(pxbuffer!, CVPixelBufferLockFlags(rawValue: 0))
        return pxbuffer
    }
}
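
To narrow down whether the problem is in newPixelBufferFrom(cgImage:) itself or in the CGImage I build from the [UInt8] data, would it make sense to feed the writer a synthetic image first? A small sketch of the kind of test image I mean (the helper name and the solid red fill are only for illustration):

import UIKit

// Hypothetical helper: a solid-colour CGImage produced by Core Graphics itself,
// with no CGDataProvider involved, to exercise newPixelBufferFrom in isolation.
func makeTestImage(width: Int, height: Int) -> CGImage? {
    let colorSpace = CGColorSpaceCreateDeviceRGB()
    guard let ctx = CGContext(data: nil,
                              width: width,
                              height: height,
                              bitsPerComponent: 8,
                              bytesPerRow: 0, // let Core Graphics choose the row stride
                              space: colorSpace,
                              bitmapInfo: CGImageAlphaInfo.premultipliedLast.rawValue) else {
        return nil
    }
    ctx.setFillColor(UIColor.red.cgColor)
    ctx.fill(CGRect(x: 0, y: 0, width: width, height: height))
    return ctx.makeImage()
}

If the video writes fine with images like this but still crashes with my converted images, that would point at the conversion rather than the writer.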

Here is the code I am using to create the video from the array of images.

typealias CXEMovieMakerCompletion = (URL) -> Void
typealias CXEMovieMakerUIImageExtractor = (AnyObject) -> UIImage?


public class CXEImagesToVideo: NSObject{
    var assetWriter:AVAssetWriter!
    var writeInput:AVAssetWriterInput!
    var bufferAdapter:AVAssetWriterInputPixelBufferAdaptor!
    var videoSettings:[String : Any]!
    var frameTime:CMTime!
    var fileURL:URL!
    
    var completionBlock: CXEMovieMakerCompletion?
    var movieMakerUIImageExtractor:CXEMovieMakerUIImageExtractor?
    
    
    public class func  videoSettings(codec:String, width:Int, height:Int) -> [String: Any]{
        if(Int(width) % 16 != 0){
            print("warning: video settings width must be divisible by 16")
        }
        
        let videoSettings:[String: Any] = [AVVideoCodecKey: AVVideoCodecType.h264,
                                           AVVideoWidthKey: width,
                                           AVVideoHeightKey: height]
       
        return videoSettings
    }
    
    public init(videoSettings: [String: Any],frameTime: CMTime) {
        super.init()
        self.frameTime = frameTime
        let paths = NSSearchPathForDirectoriesInDomains(.documentDirectory, .userDomainMask, true)
        let tempPath = paths[0] + "/exprotvideo1.mp4"
        if(FileManager.default.fileExists(atPath: tempPath)){
            guard (try? FileManager.default.removeItem(atPath: tempPath)) != nil else {
                print("remove path failed")
                return
            }
        }
        
        self.fileURL = URL(fileURLWithPath: tempPath)
        self.assetWriter = try! AVAssetWriter(url: self.fileURL, fileType: AVFileType.mp4)
        
        self.videoSettings = videoSettings
        self.writeInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: videoSettings)
        assert(self.assetWriter.canAdd(self.writeInput), "add failed")
        
        self.assetWriter.add(self.writeInput)
        let bufferAttributes:[String: Any] = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_32ARGB)]
        self.bufferAdapter = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: self.writeInput, sourcePixelBufferAttributes: bufferAttributes)
        self.frameTime = CMTimeMake(value: 1, timescale: 10)
    }
    
    func createMovieFrom(urls: [URL], withCompletion: @escaping CXEMovieMakerCompletion){
        self.createMovieFromSource(images: urls as [AnyObject], extractor:{(inputObject:AnyObject) ->UIImage? in
                                    return UIImage(data: try! Data(contentsOf: inputObject as! URL))}, withCompletion: withCompletion)
    }
    
    func createMovieFrom(images: [UIImage], withCompletion: @escaping CXEMovieMakerCompletion){
        DispatchQueue.main.async {
            self.createMovieFromSource(images: images, extractor: {(inputObject:AnyObject) -> UIImage? in
                                        return inputObject as? UIImage}, withCompletion: withCompletion)
        }
        
    }
    func imageFromLayer(layer:CALayer) -> UIImage {
        UIGraphicsBeginImageContextWithOptions(layer.frame.size, layer.isOpaque, 0)
        layer.render(in: UIGraphicsGetCurrentContext()!)
        let outputImage = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()
        return outputImage!
    }
    
    
    
    func createMovieFromSource(images: [AnyObject], extractor: @escaping CXEMovieMakerUIImageExtractor, withCompletion: @escaping CXEMovieMakerCompletion){

        self.completionBlock = withCompletion
        
        self.assetWriter.startWriting()
        self.assetWriter.startSession(atSourceTime: CMTime.zero)
        
        let mediaInputQueue = DispatchQueue.init(label: "Main") // DispatchQueue(label: "mediaInputQueue")
        var i = 0
        let frameNumber = images.count
        
            self.writeInput.requestMediaDataWhenReady(on: mediaInputQueue){
                while(true){
                    if(i >= frameNumber){
                        break
                    }
                    if (self.writeInput.isReadyForMoreMediaData){
                        var sampleBuffer:CVPixelBuffer?
                        autoreleasepool{
                            let temp = images[i]
                            let img = extractor(temp)
                            if img == nil{
                                i += 1
                                print("Warning: counld not extract one of the frames")
                                //continue
                            }

                            sampleBuffer = self.newPixelBufferFrom(cgImage: temp.cgImage!)
                            
                        }
                        if (sampleBuffer != nil){
                            if(i == 0){
                                self.bufferAdapter.append(sampleBuffer!, withPresentationTime: CMTime.zero)
                            }else{
                                let value = i - 1
                                let lastTime = CMTimeMake(value: Int64(value), timescale: self.frameTime.timescale)
                                let presentTime = CMTimeAdd(lastTime, self.frameTime)
                                self.bufferAdapter.append(sampleBuffer!, withPresentationTime: presentTime)
                            }
                            i = i + 1
                        }
                    }
                }
                self.writeInput.markAsFinished()
                self.assetWriter.finishWriting {
                    DispatchQueue.main.sync {
                        self.completionBlock!(self.fileURL)
                    }
                }
            }
    }
    
    func newPixelBufferFrom(cgImage:CGImage) -> CVPixelBuffer?{
        autoreleasepool {
            
            let options:[String: Any] = [kCVPixelBufferCGImageCompatibilityKey as String: true, kCVPixelBufferCGBitmapContextCompatibilityKey as String: true]
            var pxbuffer:CVPixelBuffer?
            let frameWidth = self.videoSettings[AVVideoWidthKey] as! Int
            let frameHeight = self.videoSettings[AVVideoHeightKey] as! Int
            let status = CVPixelBufferCreate(kCFAllocatorDefault, frameWidth, frameHeight, kCVPixelFormatType_32ARGB, options as CFDictionary?, &pxbuffer)
            assert(status == kCVReturnSuccess && pxbuffer != nil, "newPixelBuffer failed")

            CVPixelBufferLockBaseAddress(pxbuffer!, CVPixelBufferLockFlags(rawValue: 0))
            let pxdata = CVPixelBufferGetBaseAddress(pxbuffer!)
            let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
            let context = CGContext(data: pxdata, width: frameWidth, height: frameHeight, bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(pxbuffer!), space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.noneSkipFirst.rawValue)
            // CGImageAlphaInfo.noneSkipFirst.rawValue
            assert(context != nil, "context is nil")
           // context?.clear(CGRect(x: 0, y: 0, width: frameWidth, height: frameHeight))
            context!.concatenate(CGAffineTransform.identity)
            context!.draw(cgImage, in: CGRect(x: 0, y: 0, width: frameWidth, height: frameHeight))
            CVPixelBufferUnlockBaseAddress(pxbuffer!, CVPixelBufferLockFlags(rawValue: 0))
            return pxbuffer
        }
    }
}
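
For completeness, this is roughly how I drive the class; the empty [UIImage] array and the 640x480 size below are placeholders, not my real values:

import AVFoundation
import UIKit

// Placeholder driver code, simplified from my module.
let settings = CXEImagesToVideo.videoSettings(codec: AVVideoCodecType.h264.rawValue,
                                              width: 640,
                                              height: 480)
let movieMaker = CXEImagesToVideo(videoSettings: settings,
                                  frameTime: CMTimeMake(value: 1, timescale: 10))
let uiImages: [UIImage] = [] // in my module this holds the UIImages converted from the [UInt8] data
movieMaker.createMovieFrom(images: uiImages) { fileURL in
    print("video written to \(fileURL)")
}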