
I am trying to overlay two videos, with the foreground video being somewhat alpha-transparent. I have been following the Apple docs as well as this tutorial.

When I put two copies of the same video through my code it doesn't crash; however, when I feed it two different videos I receive this error:

VideoMaskingUtils.exportVideo Error: Optional(Error Domain=AVFoundationErrorDomain Code=-11841 "Operation Stopped" UserInfo={NSLocalizedDescription=Operation Stopped, NSLocalizedFailureReason=The video could not be composed.})
VideoMaskingUtils.exportVideo Description: <AVAssetExportSession: 0x1556be30, asset = <AVMutableComposition: 0x15567f10 tracks = (
"<AVMutableCompositionTrack: 0x15658030 trackID = 1, mediaType = vide, editCount = 1>",
"<AVMutableCompositionTrack: 0x1556e250 trackID = 2, mediaType = vide, editCount = 1>"
)>, presetName = AVAssetExportPresetHighestQuality, outputFileType = public.mpeg-4
Error Domain=AVFoundationErrorDomain Code=-11841 "Operation Stopped" UserInfo={NSLocalizedDescription=Operation Stopped, NSLocalizedFailureReason=The video could not be composed.}

I understand that you can't save a video with an alpha channel on iOS, so I want to flatten the two videos into one opaque video.

When I try to overlap the two videos and apply a PiP style using CATransforms, it crashes; simply overlapping them (without alpha or any other effects applied) works. Any help is appreciated.
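For reference, the PiP transform I'm attempting looks roughly like this (a minimal sketch with placeholder scale and inset values, applied to the foreground layer instruction from the code below):

    // Shrink the foreground to half size and inset it from the top-left
    // corner; the numbers here are illustrative, not my exact values.
    let scale = CGAffineTransformMakeScale(0.5, 0.5)
    let move = CGAffineTransformMakeTranslation(20.0, 20.0)
    secondlayerInstruction.setTransform(CGAffineTransformConcat(scale, move), atTime: kCMTimeZero)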

Here's my code (with both approaches in it):

class func overlay(video firstAsset: AVURLAsset, withSecondVideo secondAsset: AVURLAsset, andAlpha alpha: Float) {

    let mixComposition = AVMutableComposition()

    // Create an empty video track in the composition for each source asset.
    let firstTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)
    let secondTrack = mixComposition.addMutableTrackWithMediaType(AVMediaTypeVideo, preferredTrackID: kCMPersistentTrackID_Invalid)


    guard let firstMediaTrack = firstAsset.tracksWithMediaType(AVMediaTypeVideo).first else { return }
    guard let secondMediaTrack = secondAsset.tracksWithMediaType(AVMediaTypeVideo).first else { return }
    // Insert each source's full video track, both starting at time zero.
    do {
        try firstTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, firstAsset.duration), ofTrack: firstMediaTrack, atTime: kCMTimeZero)
        try secondTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, secondAsset.duration), ofTrack: secondMediaTrack, atTime: kCMTimeZero)
    } catch (let error) {
        print(error)
    }

    // Render at the larger of the two source dimensions so neither video gets cropped.
    let width = max(firstMediaTrack.naturalSize.width, secondMediaTrack.naturalSize.width)
    let height = max(firstMediaTrack.naturalSize.height, secondMediaTrack.naturalSize.height)

    let videoComposition = AVMutableVideoComposition()
    videoComposition.renderSize = CGSizeMake(width, height)
    videoComposition.frameDuration = firstMediaTrack.minFrameDuration


    // Toggle between the two approaches described above.
    let firstApproach = false
    if firstApproach {
        let mainInstruction = AVMutableVideoCompositionInstruction()
        mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, firstAsset.duration)
        mainInstruction.backgroundColor = UIColor.redColor().CGColor

        let firstlayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: firstTrack)
        firstlayerInstruction.setTransform(firstAsset.preferredTransform, atTime: kCMTimeZero)

        let secondInstruction = AVMutableVideoCompositionInstruction()
        secondInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, secondAsset.duration)
        let backgroundColor = UIColor(colorLiteralRed: 1.0, green: 1.0, blue: 1.0, alpha: alpha)
        secondInstruction.backgroundColor = backgroundColor.CGColor

        let secondlayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: secondTrack)
        secondlayerInstruction.setTransform(secondAsset.preferredTransform, atTime: kCMTimeZero)

        secondInstruction.layerInstructions = [secondlayerInstruction]

        mainInstruction.layerInstructions = [firstlayerInstruction]//, secondlayerInstruction]

        videoComposition.instructions = [mainInstruction, secondInstruction]

    } else {
        let firstLayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: firstMediaTrack)
        firstLayerInstruction.setTransform(firstMediaTrack.preferredTransform, atTime: kCMTimeZero)
        firstLayerInstruction.setOpacity(1.0, atTime: kCMTimeZero)

        // Flatten the foreground's alpha into the composite by lowering its
        // opacity; this is the "flatten into one opaque video" part.
        let secondlayerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: secondMediaTrack)
        secondlayerInstruction.setTransform(secondMediaTrack.preferredTransform, atTime: kCMTimeZero)
        secondlayerInstruction.setOpacity(alpha, atTime: kCMTimeZero)


        let instruction = AVMutableVideoCompositionInstruction()
        instruction.timeRange = CMTimeRangeMake(kCMTimeZero, min(firstAsset.duration, secondAsset.duration))
        instruction.layerInstructions = [firstLayerInstruction, secondlayerInstruction]

        videoComposition.instructions = [instruction]
    }



    let outputUrl = VideoMaskingUtils.getPathForTempFileNamed("output.mov")

    VideoMaskingUtils.exportCompositedVideo(mixComposition, toURL: outputUrl, withVideoComposition: videoComposition)

    // Note: the export above runs asynchronously, so this delete races the
    // exporter; if the intent is to clear a stale file, do it before exporting.
    VideoMaskingUtils.removeTempFileAtPath(outputUrl.absoluteString)
}

Here is my exportCompositedVideo function:

private class func exportCompositedVideo(compiledVideo: AVMutableComposition, toURL outputUrl: NSURL, withVideoComposition videoComposition: AVMutableVideoComposition) {
    guard let exporter = AVAssetExportSession(asset: compiledVideo, presetName: AVAssetExportPresetHighestQuality) else { return }
    exporter.outputURL = outputUrl
    exporter.videoComposition = videoComposition
    exporter.outputFileType = AVFileTypeQuickTimeMovie
    exporter.shouldOptimizeForNetworkUse = true
    exporter.exportAsynchronouslyWithCompletionHandler({
        switch exporter.status {
        case .Completed:
            // we can be confident that there is a URL because
            // we got this far. Otherwise it would've failed.
            UISaveVideoAtPathToSavedPhotosAlbum(exporter.outputURL!.path!, nil, nil, nil)
            print("VideoMaskingUtils.exportVideo SUCCESS!")
            if exporter.error != nil {
                print("VideoMaskingUtils.exportVideo Error: \(exporter.error)")
                print("VideoMaskingUtils.exportVideo Description: \(exporter.description)")
            }

            NSNotificationCenter.defaultCenter().postNotificationName("videoExportDone", object: exporter.error)
            break

        // Note: the completion handler runs once, after the export has ended,
        // so this case is effectively unreachable here.
        case .Exporting:
            let progress = exporter.progress
            print("VideoMaskingUtils.exportVideo \(progress)")

            NSNotificationCenter.defaultCenter().postNotificationName("videoExportProgress", object: progress)
            break

        case .Failed:
            print("VideoMaskingUtils.exportVideo Error: \(exporter.error)")
            print("VideoMaskingUtils.exportVideo Description: \(exporter.description)")

            NSNotificationCenter.defaultCenter().postNotificationName("videoExportDone", object: exporter.error)
            break

        default: break
        }
    })
}
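For completeness, a caller can observe the notifications this function posts along these lines (a minimal sketch; this observer isn't part of the code above):

    // The notification names match the strings posted by exportCompositedVideo.
    NSNotificationCenter.defaultCenter().addObserverForName("videoExportDone", object: nil, queue: NSOperationQueue.mainQueue()) { notification in
        // notification.object carries the exporter's error, or nil on success.
        print("Export finished. Error: \(notification.object)")
    }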
  • Similar to this [unanswered question](http://stackoverflow.com/questions/17909906/avfoundation-to-overlay-an-alpha-channel-video-on-another-video). – Blake Barrett May 29 '16 at 23:50
  • See the link at the question above; the solution is to use an encoding approach that does support an alpha channel, as described in the link. iOS cannot do that by default with H.264. – MoDJ Aug 11 '16 at 22:34

1 Answer


Your min should be max. The two composition tracks run for their assets' full durations, so the composition lasts as long as the longer asset, and the video composition's instructions must cover that entire duration with no gaps; cutting the instruction off at the shorter duration leaves an uncovered tail, and AVFoundation refuses to compose it (error -11841).

Replace this line

instruction.timeRange = CMTimeRangeMake(kCMTimeZero, min(firstAsset.duration, secondAsset.duration))

with this line, and it will work:

instruction.timeRange = CMTimeRangeMake(kCMTimeZero, max(firstAsset.duration, secondAsset.duration))
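(Note that the first approach in the question fails for a related reason: its two instructions both start at time zero, and overlapping instruction time ranges are just as invalid as gaps.)

If you want to catch this before exporting, AVVideoComposition can validate itself against the composition. A minimal sketch, using the mixComposition and videoComposition variables from the question's overlay function:

    // Returns false when the instructions' time ranges leave gaps or overlap,
    // the same conditions that surface as error -11841 at export time.
    let fullRange = CMTimeRangeMake(kCMTimeZero, mixComposition.duration)
    let isValid = videoComposition.isValidForAsset(mixComposition, timeRange: fullRange, validationDelegate: nil)
    print("Video composition valid: \(isValid)")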