2

I know this is a frequently asked question, I've looked around and there is no distinct answer.

The code is from an article: http://www.netwalk.be/article/record-square-video-ios

What I want: I want to crop the video to make it square.

So basically this is what I'm doing. It makes sense to me, but for some reason the video is not cropping, in fact, its staying the same size (width and height).

AVAsset* asset = [AVAsset assetWithURL:self.videoURL];

AVMutableComposition *composition = [AVMutableComposition composition];
[composition  addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];

// input clip
AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];

// make it square
AVMutableVideoComposition* videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.renderSize = CGSizeMake(3.0 , 3.0);
videoComposition.frameDuration = CMTimeMake(1, 30);

AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(clipVideoTrack.naturalSize.width, clipVideoTrack.naturalSize.width) );

// rotate to portrait
AVMutableVideoCompositionLayerInstruction* transformer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:clipVideoTrack];
CGAffineTransform t1 = CGAffineTransformMakeTranslation(clipVideoTrack.naturalSize.height, -(clipVideoTrack.naturalSize.width - clipVideoTrack.naturalSize.height) /2 );
CGAffineTransform t2 = CGAffineTransformRotate(t1, M_PI_2);

CGAffineTransform finalTransform = t2;
[transformer setTransform:finalTransform atTime:kCMTimeZero];
instruction.layerInstructions = [NSArray arrayWithObject:transformer];
videoComposition.instructions = [NSArray arrayWithObject: instruction];

AVAssetExportSession *exporter;

// export
exporter = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetHighestQuality] ;
exporter.videoComposition = videoComposition;
exporter.outputURL=self.videoURL;
exporter.outputFileType=AVFileTypeMPEG4;


[exporter exportAsynchronouslyWithCompletionHandler:^(void){
    NSLog(@"Exporting done!");

}]; 

I believe the problem is in the exporter at the end. Either its not exporting correctly, or there is something else I'm missing. Please someone refer me to a good way to do it. Thanks.

RJiryes
  • 951
  • 10
  • 25
  • Did the solution below fix your problem. I've having the exact same issue and I can't seem to figure out why the video is staying the same size? – brian Scroggins Sep 18 '15 at 16:49
  • Please help me if you were able to solve this. http://stackoverflow.com/questions/43451879/square-video-using-avfoundation – Ankit Kumar Gupta Apr 17 '17 at 13:22

2 Answers2

4

You need to save the results of

[composition  addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];

into

AVMutableCompositionTrack *theTrack = [composition  addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];

You need to fix your time range... you're using the width and height in a time range by accident. You should be using the clipVideoTrack.timeRange.

Finally, insert the video clip into the composition track.

NSError *error; //always check this
[theTrack insertTimeRange:timeRange ofTrack:clipVideoTrack atTime:kCMTimeZero error:&error];

EDIT:

Here is some sample code that will export the center square of a video.

Declare a property for your AVAssetExportSession:

@interface YourClassHere () 
@property (nonatomic) AVAssetExportSession *exporter;
@end

Then in your method:

// output file
NSString* outputPath = <# your output path here #>;
if ([[NSFileManager defaultManager] fileExistsAtPath:outputPath])
    [[NSFileManager defaultManager] removeItemAtPath:outputPath error:nil];

// input file
AVAsset* asset = [AVAsset assetWithURL:[NSURL fileURLWithPath:<# your path here #>]];

AVMutableComposition *composition = [AVMutableComposition composition];
[composition  addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];

// input clip
AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
CGAffineTransform transform = clipVideoTrack.preferredTransform;

//get actual display size of video
CGSize videoSize;
if ((transform.a == 0 && transform.b == 1 && transform.c == -1 && transform.d == 0) // rotate 90
    || (transform.a == 0 && transform.b == -1 && transform.c == 1 && transform.d == 0)) { // rotate -90
    videoSize = CGSizeMake(clipVideoTrack.naturalSize.height,clipVideoTrack.naturalSize.width);
} else {
    videoSize = clipVideoTrack.naturalSize;
}
CGFloat squareDimension = fminf(videoSize.width,videoSize.height);

// make render size square
AVMutableVideoComposition* videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.renderSize = CGSizeMake(squareDimension,squareDimension);
videoComposition.frameDuration = CMTimeMake(1, 30);

AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, kCMTimePositiveInfinity);

// shift video to be in the center
AVMutableVideoCompositionLayerInstruction* transformer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:clipVideoTrack];
CGAffineTransform translation = CGAffineTransformMakeTranslation(- (videoSize.width - squareDimension)/2, -(videoSize.height - squareDimension) /2 );
CGAffineTransform finalTransform = CGAffineTransformConcat(transform, translation);

[transformer setTransform:finalTransform atTime:kCMTimeZero];
instruction.layerInstructions = [NSArray arrayWithObject:transformer];
videoComposition.instructions = [NSArray arrayWithObject: instruction];

// export
self.exporter = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetHighestQuality] ;
self.exporter.videoComposition = videoComposition;
self.exporter.outputURL=[NSURL fileURLWithPath:outputPath];
self.exporter.outputFileType=AVFileTypeQuickTimeMovie;

[self.exporter exportAsynchronouslyWithCompletionHandler:^(void){
    switch(self.exporter.status) {
        case AVAssetExportSessionStatusCompleted:
            NSLog(@"file exported successfully");
            break;
        default:
            NSLog(@"file did not export successfully");
    }
}];
jlw
  • 3,166
  • 1
  • 19
  • 24
  • 1
    @jiw I'm having a similar problem but i'm confused on where I would add the code you proposed in the original code given, can you help? – brian Scroggins Sep 18 '15 at 17:21
0

Please have a look at this repo. It will solve all your issues, I believe.

https://github.com/ankit0812/CustomCamera

Ankit Kumar Gupta
  • 3,994
  • 4
  • 31
  • 54