0

The following is simple code that merges two videos using AVFoundation's AVMutableComposition. I use two separate mutable tracks for the two videos and manage their layer opacity over time:

import UIKit
import AVFoundation
import AVKit

/// Demonstrates sequencing two bundled videos back-to-back with
/// `AVMutableComposition`, cross-fading between tracks via layer-instruction
/// opacity ramps, and playing the result in an `AVPlayerViewController`.
class ViewController: UIViewController {

  // Bundled demo assets; force-unwrapping is acceptable only because a
  // missing app-bundle resource is a programmer error that should fail fast.
  let asset1 = AVAsset(url: Bundle.main.url(forResource: "4k", withExtension: "mp4")!)
  let asset2 = AVAsset(url: Bundle.main.url(forResource: "football", withExtension: "mp4")!)

  let composition = AVMutableComposition()
  let videoComposition = AVMutableVideoComposition()

  let player = AVPlayer()
  var playerItem: AVPlayerItem!

  override func viewDidLoad() {
    super.viewDidLoad()

    // Surface a missing video track (e.g. a misnamed bundle resource)
    // without crashing the whole app with a force-unwrap.
    guard
      let videoTrack1 = asset1.tracks(withMediaType: .video).first,
      let videoTrack2 = asset2.tracks(withMediaType: .video).first
    else {
      assertionFailure("One of the bundled assets has no video track")
      return
    }

    let duration1 = videoTrack1.timeRange.duration
    let duration2 = videoTrack2.timeRange.duration

    // Let AVFoundation assign unique track IDs rather than hard-coding 1/2,
    // which can collide with IDs already used inside the composition.
    guard
      let mutableVideoTrack1 = composition.addMutableTrack(
        withMediaType: .video,
        preferredTrackID: kCMPersistentTrackID_Invalid
      ),
      let mutableVideoTrack2 = composition.addMutableTrack(
        withMediaType: .video,
        preferredTrackID: kCMPersistentTrackID_Invalid
      )
    else {
      assertionFailure("Could not add mutable video tracks to the composition")
      return
    }

    do {
      // First clip at the head of the composition timeline.
      try mutableVideoTrack1.insertTimeRange(
        CMTimeRange(start: .zero, duration: duration1),
        of: videoTrack1,
        at: .zero
      )

      // Second clip immediately after the first. Use an explicit zero-based
      // range: `videoTrack2.timeRange` is not guaranteed to start at .zero
      // (tracks can carry a non-zero start), which would silently shift or
      // clip the inserted media.
      try mutableVideoTrack2.insertTimeRange(
        CMTimeRange(start: .zero, duration: duration2),
        of: videoTrack2,
        at: duration1
      )
    } catch {
      assertionFailure("Failed to insert video time ranges: \(error)")
      return
    }

    // One instruction spanning both clips; the opacity ramps below hand
    // visibility off from track 1 to track 2 at the splice point.
    let videoInst = AVMutableVideoCompositionInstruction()
    videoInst.timeRange = CMTimeRange(start: .zero, duration: duration1 + duration2)

    let layerInst1 = AVMutableVideoCompositionLayerInstruction(assetTrack: mutableVideoTrack1)
    layerInst1.setOpacity(1, at: .zero)
    layerInst1.setOpacity(0, at: duration1)

    let layerInst2 = AVMutableVideoCompositionLayerInstruction(assetTrack: mutableVideoTrack2)
    layerInst2.setOpacity(0, at: .zero)
    layerInst2.setOpacity(1, at: duration1)

    videoInst.layerInstructions = [layerInst1, layerInst2]

    // NOTE(review): renderSize ignores `preferredTransform`; rotated (portrait)
    // source video would render sideways — confirm against the test assets.
    videoComposition.renderSize = videoTrack1.naturalSize
    videoComposition.frameDuration = CMTime(value: 1, timescale: 30) // 30 fps
    videoComposition.instructions = [videoInst]

  }

  /// Wraps the composition in a fresh `AVPlayerItem`, attaches the video
  /// composition, and presents playback in an `AVPlayerViewController`.
  @IBAction func buttonTapped(_ sender: UIButton) {
    playerItem = AVPlayerItem(asset: composition)
    playerItem.videoComposition = videoComposition
    player.replaceCurrentItem(with: playerItem)

    let vc = AVPlayerViewController()
    vc.player = player
    // Start playback only once the controller is on screen.
    present(vc, animated: true) {
      self.player.play()
    }
  }

}

It works perfectly fine when run on a physical device but fails to run on the simulator. I have tried this with multiple Xcode versions and multiple simulators with no luck. Can anyone help me understand why it doesn't run on the simulator? Does anyone else get the same issue?

Rohan Sanap
  • 2,773
  • 2
  • 21
  • 39

0 Answers