I am using AVFoundation to watermark a video with another video that has transparency in it. I have managed to overlay one video on top of the other using the following code, but as soon as I start using an asset with transparency in it, the export fails without any helpful error.
This link talks about ProRes 4444 being the only codec that AVFoundation supports when it comes to alpha channels, but I cannot find any official documentation on that. The file that I am currently trying to add as an overlay is an H.264 encoded mp4, which seems to be the best choice after reading Learning AVFoundation, where it states that
The ProRes codecs are available only on OS X. If you're developing only for iOS, H264 is the only game in town.
I can always fall back to adding an animation layer instead of a video overlay, but I would be surprised if there were no solutions to this.
/// Overlays a bundled animation movie on top of the video at assetURL and
/// exports the composited result to the Documents directory.
///
/// @param assetURL File URL of the base video to be watermarked.
/// @param handler  Called on the main queue with the exported file URL on
///                 success, or nil on failure. (Bug fix: the original never
///                 invoked this parameter at all.)
- (void)addWatermarkToAsset:(NSURL *)assetURL completionHandler:(void (^)(NSURL *videoURL))handler
{
    AVURLAsset *videoAsset = [AVURLAsset URLAssetWithURL:assetURL options:nil];

    // This asset contains an alpha channel, and has a shorter duration than videoAsset.
    // NOTE(review): an H.264 mp4 cannot carry an alpha channel, so the transparency
    // in this overlay is likely lost before AVFoundation ever sees it — presumably
    // the root cause of the failing export; verify the source encoding.
    NSURL *animationUrl = [[NSBundle mainBundle] URLForResource:@"InstagramAnimation" withExtension:@"mp4"];
    AVURLAsset *animationAsset = [AVURLAsset URLAssetWithURL:animationUrl options:nil];

    AVMutableComposition *mixComposition = [[AVMutableComposition alloc] init];

    // --- Base video track ---------------------------------------------------
    AVMutableCompositionTrack *videoTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    // firstObject is nil-safe, unlike objectAtIndex:0 which throws on an
    // asset with no video tracks.
    AVAssetTrack *sourceVideoTrack = [videoAsset tracksWithMediaType:AVMediaTypeVideo].firstObject;
    NSError *error = nil;
    // Bug fix: the original passed error:nil and ignored the BOOL result,
    // which is why failures surfaced with no diagnostic information.
    if (sourceVideoTrack == nil ||
        ![videoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration)
                             ofTrack:sourceVideoTrack
                              atTime:kCMTimeZero
                               error:&error]) {
        NSLog(@"Watermark: failed to insert base video track: %@", error);
        if (handler) handler(nil);
        return;
    }

    // --- Animation (overlay) track ------------------------------------------
    AVMutableCompositionTrack *animationTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                    preferredTrackID:kCMPersistentTrackID_Invalid];
    AVAssetTrack *sourceAnimationTrack = [animationAsset tracksWithMediaType:AVMediaTypeVideo].firstObject;
    error = nil;
    if (sourceAnimationTrack == nil ||
        ![animationTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, animationAsset.duration)
                                 ofTrack:sourceAnimationTrack
                                  atTime:kCMTimeZero
                                   error:&error]) {
        NSLog(@"Watermark: failed to insert animation track: %@", error);
        if (handler) handler(nil);
        return;
    }

    // --- Composition instructions -------------------------------------------
    AVMutableVideoCompositionInstruction *mainInstruction =
        [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    mainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);

    // Shrink and offset the base video so the overlay frames it.
    AVMutableVideoCompositionLayerInstruction *videoLayerInstruction =
        [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
    CGAffineTransform scale = CGAffineTransformMakeScale(0.7f, 0.7f);
    CGAffineTransform move = CGAffineTransformMakeTranslation(230, 230);
    [videoLayerInstruction setTransform:CGAffineTransformConcat(scale, move) atTime:kCMTimeZero];

    AVMutableVideoCompositionLayerInstruction *animationLayerInstruction =
        [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:animationTrack];
    CGAffineTransform secondScale = CGAffineTransformMakeScale(1.2f, 1.5f);
    CGAffineTransform secondMove = CGAffineTransformMakeTranslation(0, 0);
    [animationLayerInstruction setTransform:CGAffineTransformConcat(secondScale, secondMove) atTime:kCMTimeZero];

    // NOTE(review): instruction order controls z-order; listing the video layer
    // first renders it on top. If the animation should overlay the video,
    // swap these — confirm against the intended output.
    mainInstruction.layerInstructions = @[videoLayerInstruction, animationLayerInstruction];

    AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoComposition];
    mainCompositionInst.instructions = @[mainInstruction];
    mainCompositionInst.frameDuration = CMTimeMake(1, 30); // 30 fps
    mainCompositionInst.renderSize = videoTrack.naturalSize;

    // --- Output URL ----------------------------------------------------------
    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = paths.firstObject;
    // arc4random_uniform avoids the modulo bias of arc4random() % 1000.
    NSString *myPathDocs = [documentsDirectory stringByAppendingPathComponent:
        [NSString stringWithFormat:@"FinalVideo-%d.mov", arc4random_uniform(1000)]];
    NSURL *url = [NSURL fileURLWithPath:myPathDocs];

    // --- Export --------------------------------------------------------------
    AVAssetExportSession *exporter =
        [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                         presetName:AVAssetExportPresetHighestQuality];
    exporter.outputURL = url;
    exporter.outputFileType = AVFileTypeQuickTimeMovie;
    exporter.shouldOptimizeForNetworkUse = YES;
    exporter.videoComposition = mainCompositionInst;

    [exporter exportAsynchronouslyWithCompletionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{
            // Bug fix: check the session status and surface exporter.error
            // instead of silently ignoring failed exports.
            if (exporter.status != AVAssetExportSessionStatusCompleted) {
                NSLog(@"Watermark: export failed (status %ld): %@",
                      (long)exporter.status, exporter.error);
            }
            [self exportDidFinish:exporter];
            if (handler) {
                handler(exporter.status == AVAssetExportSessionStatusCompleted ? exporter.outputURL : nil);
            }
        });
    }];
}