I'm trying to merge two videos that were recorded in different orientations:

- Video A: portrait, 720x1280, MOV
- Video B: landscape, 640x480, MP4
I'm starting by resizing and cropping Video A so it matches Video B's 640x480 landscape frame:
- (void)resizeWithStyle:(NSString *)style {
    NSString *filePath = [self.lastVideo path];
    NSString *newPath = [filePath stringByReplacingOccurrencesOfString:@".mov" withString:@".mp4"];
    NSURL *fullPath = [NSURL fileURLWithPath:newPath];
    NSURL *path = [NSURL fileURLWithPath:filePath];

    NSDictionary *options = @{ AVURLAssetPreferPreciseDurationAndTimingKey : @YES };
    AVURLAsset *asset = [[AVURLAsset alloc] initWithURL:self.lastVideo options:options];

    // Swap the output dimensions when the source is portrait.
    NSInteger width = 640;
    NSInteger height = 480;
    if (([self orientationForTrack:asset] == UIInterfaceOrientationPortrait) ||
        ([self orientationForTrack:asset] == UIInterfaceOrientationPortraitUpsideDown)) {
        width = 480;
        height = 640;
    }

    NSLog(@"Write Started");
    NSError *error = nil;

    NSString *styleKey = AVVideoScalingModeResizeAspectFill;
    if ([style isEqualToString:@"fit"]) {
        styleKey = AVVideoScalingModeResizeAspect;
    }

    // AVFileTypeMPEG4 to match the .mp4 extension (was AVFileTypeQuickTimeMovie).
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:fullPath
                                                           fileType:AVFileTypeMPEG4
                                                              error:&error];
    NSParameterAssert(videoWriter);

    AVAsset *avAsset = [[AVURLAsset alloc] initWithURL:path options:nil];
    NSDictionary *videoSettings = @{ AVVideoCodecKey       : AVVideoCodecH264,
                                     AVVideoWidthKey       : @(width),
                                     AVVideoHeightKey      : @(height),
                                     AVVideoScalingModeKey : styleKey };
    AVAssetWriterInput *videoWriterInput =
        [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                           outputSettings:videoSettings];
    NSParameterAssert(videoWriterInput);
    NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
    videoWriterInput.expectsMediaDataInRealTime = NO; // offline transcode, not live capture
    [videoWriter addInput:videoWriterInput];

    NSError *aerror = nil;
    AVAssetReader *reader = [[AVAssetReader alloc] initWithAsset:avAsset error:&aerror];
    AVAssetTrack *videoTrack = [[avAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    // Carry the source's rotation metadata over to the output track.
    videoWriterInput.transform = videoTrack.preferredTransform;

    NSDictionary *videoOptions =
        @{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange) };
    AVAssetReaderTrackOutput *assetReaderOutput =
        [[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack outputSettings:videoOptions];
    [reader addOutput:assetReaderOutput];

    // Audio setup (assumes the source asset has an audio track).
    AVAssetWriterInput *audioWriterInput =
        [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                           outputSettings:nil]; // nil settings = passthrough
    AVAssetReader *audioReader = [AVAssetReader assetReaderWithAsset:avAsset error:&error];
    AVAssetTrack *audioTrack = [[avAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0];
    AVAssetReaderOutput *readerOutput =
        [AVAssetReaderTrackOutput assetReaderTrackOutputWithTrack:audioTrack outputSettings:nil];
    [audioReader addOutput:readerOutput];
    NSParameterAssert(audioWriterInput);
    NSParameterAssert([videoWriter canAddInput:audioWriterInput]);
    audioWriterInput.expectsMediaDataInRealTime = NO;
    [videoWriter addInput:audioWriterInput];

    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];
    [reader startReading];

    dispatch_queue_t processingQueue = dispatch_queue_create("assetVideoWriterQueue", NULL);
    [videoWriterInput requestMediaDataWhenReadyOnQueue:processingQueue usingBlock:^{
        while ([videoWriterInput isReadyForMoreMediaData]) {
            CMSampleBufferRef sampleBuffer;
            if ([reader status] == AVAssetReaderStatusReading &&
                (sampleBuffer = [assetReaderOutput copyNextSampleBuffer])) {
                BOOL result = [videoWriterInput appendSampleBuffer:sampleBuffer];
                CFRelease(sampleBuffer);
                if (!result) {
                    [reader cancelReading];
                    break;
                }
            } else {
                [videoWriterInput markAsFinished];
                switch ([reader status]) {
                    case AVAssetReaderStatusReading:
                        // The reader has more for other tracks, even if this one is done.
                        break;
                    case AVAssetReaderStatusFailed:
                        [videoWriter cancelWriting];
                        break;
                    case AVAssetReaderStatusCompleted: {
                        // Video track is done; now pull the audio track through.
                        // (The session was already started above, so no second
                        // startSessionAtSourceTime: call here.)
                        [audioReader startReading];
                        dispatch_queue_t mediaInputQueue = dispatch_queue_create("mediaInputQueue", NULL);
                        [audioWriterInput requestMediaDataWhenReadyOnQueue:mediaInputQueue usingBlock:^{
                            while (audioWriterInput.readyForMoreMediaData) {
                                CMSampleBufferRef nextBuffer;
                                if ([audioReader status] == AVAssetReaderStatusReading &&
                                    (nextBuffer = [readerOutput copyNextSampleBuffer])) {
                                    [audioWriterInput appendSampleBuffer:nextBuffer];
                                    CFRelease(nextBuffer); // balance copyNextSampleBuffer
                                } else {
                                    [audioWriterInput markAsFinished];
                                    if ([audioReader status] == AVAssetReaderStatusCompleted) {
                                        [videoWriter finishWritingWithCompletionHandler:^{
                                            if (videoWriter.status == AVAssetWriterStatusCompleted) {
                                                NSLog(@"Asset written");
                                                NSLog(@"New Asset Orientation: %d", (int)[self orientationForTrack:asset]);
                                                [self checkFileExists:fullPath];
                                                [self getVideoProperties:fullPath];
                                                self.lastVideo = fullPath;
                                                self.libraryVideo = fullPath;
                                            } else {
                                                NSLog(@"Write failed: %@", videoWriter.error);
                                            }
                                        }];
                                    }
                                    break;
                                }
                            }
                        }];
                        break;
                    }
                }
                break;
            }
        }
    }];
    NSLog(@"Write Ended"); // note: the actual writing continues asynchronously
}
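For reference, orientationForTrack: (used throughout) is the standard preferredTransform comparison; my version is roughly this:

- (UIInterfaceOrientation)orientationForTrack:(AVAsset *)asset {
    AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    CGAffineTransform t = videoTrack.preferredTransform;
    if (t.a == 0 && t.b == 1.0 && t.c == -1.0 && t.d == 0) {
        return UIInterfaceOrientationPortrait;            // source rotated 90 degrees CW
    }
    if (t.a == 0 && t.b == -1.0 && t.c == 1.0 && t.d == 0) {
        return UIInterfaceOrientationPortraitUpsideDown;  // source rotated 90 degrees CCW
    }
    if (t.a == -1.0 && t.b == 0 && t.c == 0 && t.d == -1.0) {
        return UIInterfaceOrientationLandscapeLeft;       // source rotated 180 degrees
    }
    return UIInterfaceOrientationLandscapeRight;          // identity transform
}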
This APPEARS to work well, and the output looks exactly as I'd like, with the ability to aspect-fill or aspect-fit to 640x480 in H.264 (MP4). The video now APPEARS to be landscape when played back in MPMoviePlayerController.
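Playback is nothing fancy, just the stock player, roughly:

// Quick playback check of the resized file.
MPMoviePlayerController *player = [[MPMoviePlayerController alloc] initWithContentURL:self.lastVideo];
player.view.frame = self.view.bounds;
[self.view addSubview:player.view];
[player play]; // (player is retained by a strong property in the real code)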
Next, I'm attempting to join the two videos:
- (void)joinVideo:(id)sender {
    if ((self.libraryVideo != nil) && (self.recordVideo != nil)) {
        NSString *libraryPath = [self.libraryVideo path];
        NSString *outputPath = [libraryPath stringByReplacingOccurrencesOfString:@".mp4"
                                                                      withString:@"-joined.mp4"];
        NSURL *outputURL = [NSURL fileURLWithPath:outputPath];
        NSLog(@"Can Merge Video");

        NSMutableArray *audioTracks = [NSMutableArray array];
        NSMutableArray *videoTracks = [NSMutableArray array];
        NSDictionary *options = @{ AVURLAssetPreferPreciseDurationAndTimingKey : @YES };

        AVURLAsset *asset2 = [AVURLAsset URLAssetWithURL:self.recordVideo options:options];
        [videoTracks addObjectsFromArray:[asset2 tracksWithMediaType:AVMediaTypeVideo]];
        [audioTracks addObjectsFromArray:[asset2 tracksWithMediaType:AVMediaTypeAudio]];
        NSLog(@"Asset 2 Orientation: %d", (int)[self orientationForTrack:asset2]);

        AVURLAsset *asset1 = [AVURLAsset URLAssetWithURL:self.libraryVideo options:options];
        [videoTracks addObjectsFromArray:[asset1 tracksWithMediaType:AVMediaTypeVideo]];
        [audioTracks addObjectsFromArray:[asset1 tracksWithMediaType:AVMediaTypeAudio]];
        NSLog(@"Asset 1 Orientation: %d", (int)[self orientationForTrack:asset1]);

        AVMutableComposition *composition = [[AVMutableComposition alloc] init];
        AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
        videoComposition.frameDuration = CMTimeMake(1, 30);
        videoComposition.renderScale = 1.0;

        if ([audioTracks count] > 0) {
            AVMutableCompositionTrack *audioTrackComposition =
                [composition addMutableTrackWithMediaType:AVMediaTypeAudio
                                         preferredTrackID:kCMPersistentTrackID_Invalid];
            [audioTracks enumerateObjectsUsingBlock:^(AVAssetTrack *track, NSUInteger idx, BOOL *stop) {
                // Inserting at kCMTimeZero each time prepends, so asset1
                // (added second) ends up first in the timeline.
                [audioTrackComposition insertTimeRange:CMTimeRangeMake(kCMTimeZero, track.timeRange.duration)
                                               ofTrack:track
                                                atTime:kCMTimeZero
                                                 error:nil];
            }];
        }

        AVMutableCompositionTrack *videoTrackComposition =
            [composition addMutableTrackWithMediaType:AVMediaTypeVideo
                                     preferredTrackID:kCMPersistentTrackID_Invalid];
        [videoTracks enumerateObjectsUsingBlock:^(AVAssetTrack *track, NSUInteger idx, BOOL *stop) {
            [videoTrackComposition insertTimeRange:CMTimeRangeMake(kCMTimeZero, track.timeRange.duration)
                                           ofTrack:track
                                            atTime:kCMTimeZero
                                             error:nil];
        }];

        AVMutableVideoCompositionInstruction *instruction =
            [AVMutableVideoCompositionInstruction videoCompositionInstruction];
        AVMutableVideoCompositionLayerInstruction *layerInstruction =
            [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrackComposition];
        AVAssetTrack *sourceVideoTrack = [[asset2 tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];

        // Rotate 90 degrees, then translate back into frame.
        CGAffineTransform rotationTransform = CGAffineTransformMakeRotation(M_PI / 2);
        CGAffineTransform rotateTranslate = CGAffineTransformTranslate(rotationTransform, 320, 0);
        [videoTrackComposition setPreferredTransform:sourceVideoTrack.preferredTransform];
        [layerInstruction setTransform:rotateTranslate atTime:kCMTimeZero];
        instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction];
        videoComposition.instructions = [NSArray arrayWithObject:instruction];

        AVAssetExportSession *exportSession =
            [[AVAssetExportSession alloc] initWithAsset:composition
                                             presetName:AVAssetExportPresetPassthrough];
        exportSession.outputFileType = AVFileTypeMPEG4;
        exportSession.shouldOptimizeForNetworkUse = YES;
        exportSession.outputURL = outputURL;
        [exportSession exportAsynchronouslyWithCompletionHandler:^{
            switch (exportSession.status) {
                case AVAssetExportSessionStatusFailed:
                    NSLog(@"Join Failed: %@", exportSession.error);
                    break;
                case AVAssetExportSessionStatusCompleted:
                    NSLog(@"Join Completed");
                    [self checkFileExists:outputURL];
                    [self getVideoProperties:outputURL];
                    self.lastVideo = outputURL;
                    break;
                case AVAssetExportSessionStatusCancelled:
                    NSLog(@"Join Cancelled");
                    break;
                default:
                    break;
            }
        }];
    }
}
This successfully joins the videos. However, Video A is now rotated 90 degrees, while Video B looks correct.
I've spent a great deal of time trying to figure out why Video A is not being treated as a true 640x480 landscape video in this merge. I've attempted running a separate operation on Video A to force a preferredTransform change, but it doesn't seem to do anything. Setting the preferred transform on the original AVAssetWriterInput doesn't seem to have an effect either.
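For example, this is roughly the separate pass I tried on Video A (output URL simplified); the exported file still plays rotated:

// Re-wrap Video A in a composition and stamp an identity transform
// on the track before a passthrough export.
AVURLAsset *assetA = [AVURLAsset URLAssetWithURL:self.libraryVideo options:nil];
AVAssetTrack *srcTrack = [[assetA tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
AVMutableComposition *comp = [AVMutableComposition composition];
AVMutableCompositionTrack *compTrack =
    [comp addMutableTrackWithMediaType:AVMediaTypeVideo
                      preferredTrackID:kCMPersistentTrackID_Invalid];
[compTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, assetA.duration)
                   ofTrack:srcTrack
                    atTime:kCMTimeZero
                     error:nil];
compTrack.preferredTransform = CGAffineTransformIdentity; // force "no rotation"
AVAssetExportSession *session =
    [[AVAssetExportSession alloc] initWithAsset:comp
                                     presetName:AVAssetExportPresetPassthrough];
session.outputFileType = AVFileTypeMPEG4;
session.outputURL = tempURL; // hypothetical temp file URL
[session exportAsynchronouslyWithCompletionHandler:^{ /* check status */ }];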
Any ideas for something that truly works? Everything I've tried ends up with Video A rotated 90 degrees and stretched after the merge.
Is there any way to force an orientation on input from the Photo Library?
Thanks!