I'm trying to merge 2 videos with a 2-second overlap. In this overlap I'd like to fade the second video in (or fade the first one out to reveal the second; either would work).
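To make the timing concrete (with made-up lengths): if video one runs 10 seconds and video two runs 8 seconds, the merged result should run 16 seconds, with both clips on screen between the 8- and 10-second marks while their opacities cross-fade.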
The first video fades out over its last 2 seconds as intended, but behind the fade I get a black screen instead of the second video fading in. When video 1 ends, video 2 pops in already halfway through its fade-in animation.
What am I doing wrong with the tracks that keeps them from overlapping? Here is my code:
func setupVideo() {
    let url = URL(fileURLWithPath: Bundle.main.path(forResource: "demoVideoTwo", ofType: "mp4")!)
    let assetOne = AVAsset(url: url)
    let urlTwo = URL(fileURLWithPath: Bundle.main.path(forResource: "demoVideoThree", ofType: "mp4")!)
    let assetTwo = AVAsset(url: urlTwo)

    let mixComposition = AVMutableComposition()
    var instructions = [AVMutableVideoCompositionLayerInstruction]()
    var mainInstructionList = [AVMutableVideoCompositionInstruction]()
    var lastTime = CMTime.zero

    // Create Track One
    guard let videoTrack = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)),
          let audioTrack = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else {
        return
    }
    // Setup AVAsset 1
    let timeRange = CMTimeRangeMake(start: CMTime.zero, duration: assetOne.duration)
    do {
        try videoTrack.insertTimeRange(timeRange, of: assetOne.tracks(withMediaType: .video)[0], at: lastTime)
        try audioTrack.insertTimeRange(timeRange, of: assetOne.tracks(withMediaType: .audio)[0], at: lastTime)
    } catch {
        print(error)
    }
    // Setup Layer Instruction 1
    let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack)
    let duration = CMTime(seconds: 2, preferredTimescale: 60)
    let transitTime = CMTime(seconds: 2, preferredTimescale: 60)
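    // Start the fade-out 2 seconds before the end of video one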
    let insertTime = CMTimeSubtract(assetOne.duration, transitTime)
    let instRange = CMTimeRangeMake(start: insertTime, duration: duration)
    layerInstruction.setOpacityRamp(fromStartOpacity: 1.0, toEndOpacity: 0.0, timeRange: instRange)
    instructions.append(layerInstruction)

    let mainInstruction = AVMutableVideoCompositionInstruction()
    mainInstruction.timeRange = CMTimeRangeMake(start: lastTime, duration: assetOne.duration)
    mainInstruction.layerInstructions = instructions
    mainInstructionList.append(mainInstruction)
    lastTime = CMTimeAdd(lastTime, assetOne.duration)
    // Create Track Two
    guard let videoTrackTwo = mixComposition.addMutableTrack(withMediaType: .video, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)),
          let audioTrackTwo = mixComposition.addMutableTrack(withMediaType: .audio, preferredTrackID: Int32(kCMPersistentTrackID_Invalid)) else {
        return
    }

    // Setup AVAsset 2
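    // Insert video two 2 seconds before video one ends so the clips overlap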
    let transitionTime = CMTime(seconds: 2, preferredTimescale: 60)
    let newLastTime = CMTimeSubtract(assetOne.duration, transitionTime)
    let timeRangeTwo = CMTimeRangeMake(start: CMTime.zero, duration: assetTwo.duration)
    do {
        try videoTrackTwo.insertTimeRange(timeRangeTwo, of: assetTwo.tracks(withMediaType: .video)[0], at: newLastTime)
        try audioTrackTwo.insertTimeRange(timeRangeTwo, of: assetTwo.tracks(withMediaType: .audio)[0], at: newLastTime)
    } catch {
        print(error)
    }
    // Setup Layer Instruction 2
    let layerInstructionTwo = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrackTwo)
    let durationTwo = CMTime(seconds: 4, preferredTimescale: 60)
    let instRangeTwo = CMTimeRangeMake(start: newLastTime, duration: durationTwo)
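    // Ramp video two from fully transparent to fully opaque as it enters the timeline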
    layerInstructionTwo.setOpacityRamp(fromStartOpacity: 0.0, toEndOpacity: 1.0, timeRange: instRangeTwo)
    instructions.append(layerInstructionTwo)

    let mainInstructionTwo = AVMutableVideoCompositionInstruction()
    mainInstructionTwo.timeRange = CMTimeRangeMake(start: lastTime, duration: assetTwo.duration)
    mainInstructionTwo.layerInstructions = instructions
    mainInstructionList.append(mainInstructionTwo)
    // Setup Video Composition
    let mainComposition = AVMutableVideoComposition()
    mainComposition.instructions = mainInstructionList
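    // Render at 60 fps (one frame every 1/60 s)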
    mainComposition.frameDuration = CMTimeMake(value: 1, timescale: 60)
    mainComposition.renderSize = videoTrack.naturalSize

    let item = AVPlayerItem(asset: mixComposition)
    item.videoComposition = mainComposition
    player = AVPlayer(playerItem: item)

    let playerLayer: AVPlayerLayer = {
        let layer = AVPlayerLayer(player: player)
        layer.videoGravity = .resizeAspectFill
        return layer
    }()
    let playerWidth: CGFloat = UIScreen.main.bounds.size.width
    let videoHeight = UIScreen.main.bounds.size.width * 9 / 16
    playerLayer.frame = CGRect(x: 0, y: 0, width: playerWidth, height: videoHeight)
    self.layer.addSublayer(playerLayer)
}
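And here is the timeline I'm expecting, as a quick sanity check in code (same made-up clip lengths as the example above, not my actual assets):

// Made-up clip lengths, for illustration only
let clipOneLength = CMTime(seconds: 10, preferredTimescale: 60)
let clipTwoLength = CMTime(seconds: 8, preferredTimescale: 60)
let overlap = CMTime(seconds: 2, preferredTimescale: 60)
let overlapStart = CMTimeSubtract(clipOneLength, overlap) // 8 s: video two should appear here
let totalLength = CMTimeAdd(overlapStart, clipTwoLength)  // 16 s: expected length of the merged video
print(CMTimeGetSeconds(overlapStart), CMTimeGetSeconds(totalLength)) // prints 8.0 16.0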