I'm trying to take a video from the camera and export it as a square. I'm testing on an iPad Air with both the front and rear cameras.
Everything works well when I capture video with the rear camera - the video is cropped exactly as I want. Unfortunately, the crop comes out wrong when I export video taken with the front camera.
The translation seems to be off, because I get large black stripes at the bottom of the video. Does anyone have a clue what I'm doing wrong?
Note: I'm testing on iOS 9 - not sure whether that could be the source of the problem.
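For what it's worth, I suspect the front camera track carries a different preferredTransform than the rear one (mirroring and/or the opposite rotation), which would break the tx/ty checks in my code below. This is just the diagnostic snippet I use to compare the two tracks side by side (NSStringFromCGSize and NSStringFromCGAffineTransform are from UIKit):

    // Diagnostic only: dump the source track's geometry so the front
    // and rear camera values can be compared.
    AVAssetTrack *videoTrack = [[sourceAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    NSLog(@"naturalSize: %@", NSStringFromCGSize(videoTrack.naturalSize));
    NSLog(@"preferredTransform: %@", NSStringFromCGAffineTransform(videoTrack.preferredTransform));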
- (AVComposition *)trimmedAndCroppedVideoComposition
{
    AVMutableComposition *composition = [AVMutableComposition composition];
    AVURLAsset *sourceAsset = [[AVURLAsset alloc] initWithURL:self.media.videoURL
                                                      options:@{AVURLAssetPreferPreciseDurationAndTimingKey: @(YES)}];

    // Insert only the trimmed range of the source asset.
    CMTimeRange timeRange = self.media.trimmedVideoRange;
    [composition insertTimeRange:timeRange ofAsset:sourceAsset atTime:kCMTimeZero error:nil];

    AVAssetTrack *track = [[sourceAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    AVMutableCompositionTrack *compositionTrack = [[composition tracksWithMediaType:AVMediaTypeVideo] firstObject];

    // Orientation-corrected size of the source video.
    CGSize videoSize = CGSizeApplyAffineTransform(track.naturalSize, track.preferredTransform);
    videoSize = CGSizeMake(fabs(videoSize.width), fabs(videoSize.height));

    // Aspect-fill scale so the video covers the square render size.
    CGFloat fillScale = MAX(self.renderSize.width / videoSize.width,
                            self.renderSize.height / videoSize.height);

    // If the rotation transform translates by the video size, replace that
    // translation with the render size (square, so width == height).
    CGAffineTransform orientationTransform = track.preferredTransform;
    if (orientationTransform.tx == videoSize.width || orientationTransform.tx == videoSize.height) {
        orientationTransform.tx = self.renderSize.width;
    }
    if (orientationTransform.ty == videoSize.width || orientationTransform.ty == videoSize.height) {
        orientationTransform.ty = self.renderSize.width;
    }

    // Scale first, then apply the orientation transform.
    CGAffineTransform t1 = CGAffineTransformScale(CGAffineTransformIdentity, fillScale, fillScale);
    CGAffineTransform t2 = CGAffineTransformConcat(t1, orientationTransform);

    // Crop rect in normalized coordinates (fractions of the scaled video);
    // translate so its origin maps to (0, 0) of the render area.
    CGRect cropRect = CGRectMake(0, 0.5, 1, 0.5);
    CGAffineTransform t3 = CGAffineTransformConcat(t2,
        CGAffineTransformMakeTranslation(-cropRect.origin.x * videoSize.width * fillScale,
                                         -cropRect.origin.y * videoSize.height * fillScale));

    compositionTrack.preferredTransform = t3;
    return [composition copy];
}
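One thing I've tried while debugging: instead of comparing tx/ty against the video size, deriving the rotation angle directly from the transform, since I suspect the front camera reports the opposite rotation (and possibly a mirror). A minimal sketch, assuming `track` is the source video track from above and that the transform has no shear:

    // Sketch: derive the track's rotation from its preferredTransform
    // (atan2 of the matrix's b/a components). A mirrored front-camera
    // transform would show up as a flipped sign here.
    CGAffineTransform t = track.preferredTransform;
    CGFloat angleInRadians = atan2(t.b, t.a);
    CGFloat angleInDegrees = angleInRadians * 180.0 / M_PI;
    NSLog(@"track rotation: %.0f degrees", angleInDegrees);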
- (void)_exportVideo:(void (^)(void))completion
{
    // Trimmed and cropped asset
    AVComposition *trimmedAsset = [self trimmedAndCroppedVideoComposition];

    // Input clip
    AVAssetTrack *clipVideoTrack = [[trimmedAsset tracksWithMediaType:AVMediaTypeVideo] firstObject];
    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, trimmedAsset.duration);

    // Apply the transform baked into the composition track.
    AVMutableVideoCompositionLayerInstruction *transformer =
        [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:clipVideoTrack];
    CGAffineTransform finalTransform = clipVideoTrack.preferredTransform;
    [transformer setTransform:finalTransform atTime:kCMTimeZero];
    instruction.layerInstructions = @[transformer];

    // Make it square
    AVMutableVideoComposition *videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.renderSize = CGSizeMake(self.renderSize.width, self.renderSize.height);
    videoComposition.frameDuration = CMTimeMake(1, 30); // 30 fps
    videoComposition.instructions = @[instruction];

    // Export
    self.exporter = [[AVAssetExportSession alloc] initWithAsset:trimmedAsset presetName:AVAssetExportPresetMediumQuality];
    self.exporter.videoComposition = videoComposition;
    self.exporter.outputURL = [NSURL fileURLWithPath:outputPath];
    self.exporter.outputFileType = AVFileTypeQuickTimeMovie;
    [self.exporter exportAsynchronouslyWithCompletionHandler:^(void){
        ...
    }];
}
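In case it matters, the completion handler elided above is just a status check along these lines (a sketch, not the exact code; `completion` is the block parameter of `_exportVideo:`):

    // Sketch of a typical completion handler for the export session.
    dispatch_async(dispatch_get_main_queue(), ^{
        switch (self.exporter.status) {
            case AVAssetExportSessionStatusCompleted:
                if (completion) completion();
                break;
            case AVAssetExportSessionStatusFailed:
                NSLog(@"Export failed: %@", self.exporter.error);
                break;
            default:
                break;
        }
    });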