3

I am trying to take a different approach at combining videos. I am creating a new track for each transformation.

The problem with this code is that the first video is shown and all others are black.

The audio overlay is correct for the entire segment. It looks like the video is not brought in to the composition because the size of the file is 5 M when it should be about 25M. The 5M size correlates to the size of the first clip and the audio track. All of the AVAssets appear to be valid. The files do exist on the file system. Here is the code:

- (void)mergeVideos:(NSMutableArray *)assets withCompletion:(void (^)(NSString *))completion; {


    //    NSMutableArray *instructions = [NSMutableArray new];
    CGSize size = CGSizeZero;
    CMTime currentstarttime = kCMTimeZero;

    int tracknumber = 1;
    int32_t commontimescale = 600;
    CMTime time = kCMTimeZero;

    AVMutableComposition *mutableComposition = [AVMutableComposition composition];
    NSMutableArray *instructions = [[NSMutableArray alloc] init];

    for (NSURL *assetUrl in assets) {

        AVAsset *asset = [AVAsset assetWithURL:assetUrl];

        NSLog(@"Number of tracks: %lu  Incremental track number %i", (unsigned long)[[asset tracks] count], tracknumber);

        // make sure the timescales are correct for these tracks
        CMTime cliptime = CMTimeConvertScale(asset.duration, commontimescale, kCMTimeRoundingMethod_QuickTime);

        AVMutableCompositionTrack *videoCompositionTrack = [mutableComposition addMutableTrackWithMediaType:AVMediaTypeVideo
                                                                                           preferredTrackID:kCMPersistentTrackID_Invalid];

        AVAssetTrack *assetTrack = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject;

        NSLog(@"Running time: value = %lld  timescale = %d", time.value, time.timescale);
        NSLog(@"Asset length: value = %lld  timescale = %d", asset.duration.value, asset.duration.timescale);
        NSLog(@"Converted Scale: value = %lld  timescale = %d", cliptime.value, cliptime.timescale);

        NSError *error;

        [videoCompositionTrack insertEmptyTimeRange:CMTimeRangeMake(kCMTimeZero, time)];
        [videoCompositionTrack insertTimeRange:CMTimeRangeMake(time, cliptime)
                                       ofTrack:assetTrack
                                        atTime:time
                                         error:&error];
        if (error) {
            NSLog(@"Error - %@", error.debugDescription);
        }

        // this flips the video temporarily for the front facing camera
        AVMutableVideoCompositionLayerInstruction *inst = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoCompositionTrack];

        // set the flipping trasform to the correct tracks
        if ((tracknumber == 2) || (tracknumber == 4) || (tracknumber == 6) || (tracknumber == 8) || (tracknumber == 10)) {
            CGAffineTransform transform = CGAffineTransformMakeRotation(M_PI);
            [inst setTransform:transform atTime:time];
        } else {
            CGAffineTransform transform = assetTrack.preferredTransform;
            [inst setTransform:transform atTime:time];
        }

        // don't block the other videos with your black - needs to be the incremental time
        [inst setOpacity:0.0 atTime:time];

        // add the instructions to the overall array
        [instructions addObject:inst];

        // increment the total time after w use it for this iteration
        time = CMTimeAdd(time, cliptime);

        if (CGSizeEqualToSize(size, CGSizeZero)) {
            size = [asset tracksWithMediaType:AVMediaTypeVideo].firstObject.naturalSize;;
        }

        // incrememt the track counter
        tracknumber++;
    }

    AVMutableVideoCompositionInstruction *mainVideoCompositionInstruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    mainVideoCompositionInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, time);

    mainVideoCompositionInstruction.layerInstructions = instructions;

    // bring all of the video together in the main composition
    AVMutableVideoComposition *mainVideoComposition = [AVMutableVideoComposition videoComposition];
    mainVideoComposition.instructions = [NSArray arrayWithObject:mainVideoCompositionInstruction];

    // setup the audio
    AVMutableCompositionTrack *audioCompositionTrack = [mutableComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                                                                       preferredTrackID:kCMPersistentTrackID_Invalid];


    // Grab the path, make sure to add it to your project!
    NSURL *soundURL = [NSURL fileURLWithPath:[[NSBundle mainBundle] pathForResource:@"bink-bink-lexus-3" ofType:@"aif"]];
    AVURLAsset *soundAsset = [AVURLAsset assetWithURL:soundURL];

    NSError *error;

    // add audio to the entire track
    [audioCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, mutableComposition.duration)
                                   ofTrack:[soundAsset tracksWithMediaType:AVMediaTypeAudio][0]
                                    atTime:kCMTimeZero
                                     error:&error];

    // Set the frame duration to an appropriate value (i.e. 30 frames per second for video).
    //    mainVideoComposition.frameDuration = CMTimeMake(1, 30);
    mainVideoComposition.renderSize = size;

    NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *documentsDirectory = [paths firstObject];
    int number = arc4random_uniform(10000);
    self.outputFile = [documentsDirectory stringByAppendingFormat:@"/export_%i.mov",number];
    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mutableComposition
                                                                      presetName:AVAssetExportPreset1280x720];

    exporter.outputURL = [NSURL fileURLWithPath:self.outputFile];
    //Set the output file type
    exporter.outputFileType = AVFileTypeQuickTimeMovie;
    exporter.shouldOptimizeForNetworkUse = YES;


    dispatch_group_t group = dispatch_group_create();


    dispatch_group_enter(group);

    [exporter exportAsynchronouslyWithCompletionHandler:^{
        dispatch_group_leave(group);

    }];

    dispatch_group_notify(group, dispatch_get_main_queue(), ^{

        NSLog(@"Export File (Final) - %@", self.outputFile);
        completion(self.outputFile);

    });

}
iPhone Guy
  • 1,031
  • 10
  • 30

3 Answers3

0

Your problem is that by using multiple AVMutableCompositionTracks and inserting a time range at a time after kCMTimeZero, you are causing each subsequent track to have its media appear in the composition at kCMTimeZero. You need to use insertEmptyTimeRange: if you want to pursue this route. It will move the media for that particular track forward in time by the duration of the empty range you insert.

Or, a much much easier way would be to use a single AVMutableCompositionTrack.

Community
  • 1
  • 1
jlw
  • 3,166
  • 1
  • 19
  • 24
  • I really wish I could get it to work with a single AVMutableCompositionTrack, but it has been futile to this point. I have been spending days on this issue and trying all different methods. – iPhone Guy Oct 06 '15 at 20:10
  • I added the insert of the empty time range before adding the track and had the same results as before. %M file that contains the first video, music and no further videos. – iPhone Guy Oct 06 '15 at 20:11
  • Please post the new code, including the lines for inserting the empty time range. Don't remove the old code from your post – jlw Oct 06 '15 at 21:13
  • The new code is: [videoCompositionTrack insertEmptyTimeRange:CMTimeRangeMake(kCMTimeZero, time)]; [videoCompositionTrack insertTimeRange:CMTimeRangeMake(time, cliptime) – iPhone Guy Oct 06 '15 at 21:17
  • The old code: [videoCompositionTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, cliptime) – iPhone Guy Oct 06 '15 at 21:18
  • From the doc linked in my answer: "If you insert an empty time range into the track, any media that was presented during that interval prior to the insertion will be presented instead immediately afterward." For all videos after the first, you should call insertEmptyTimeRange AFTER calling insertTimeRange. It helps to see context if you post your full updated code as an edit to your question. – jlw Oct 06 '15 at 21:21
  • I added the code // this adds the video after the first one is added if (notfirst) [videoCompositionTrack insertEmptyTimeRange:CMTimeRangeMake(kCMTimeZero, time)]; AFTER the insertion - same results. First vide plays, the remainder are black. The audio is correct throughout – iPhone Guy Oct 06 '15 at 21:38
  • See the question http://stackoverflow.com/questions/32699232/ios-combine-three-videos-rotate-the-center-video/32727404?noredirect=1#comment53781920_32727404 for a better method using one track rather then multiple tracks. – iPhone Guy Oct 16 '15 at 17:30
  • This answer is incorrect. If you insert an asset into a track starting at some non-zero time, it will add an empty range before that range automatically. Thus adding it manually won't change the outcome. – Aleks N. Oct 25 '21 at 12:38
0

Refer to this post: iOS Combine three videos - rotate the center video

This post shows how to use a single track rather than multiple tracks.

Community
  • 1
  • 1
iPhone Guy
  • 1,031
  • 10
  • 30
0

I am adding this answer for any wanderers and were unable to solve the problem like me.

You need to move this piece of code

   time = CMTimeAdd(time, cliptime);`

before:

// don't block the other videos with your black - needs to be the incremental time
    [inst setOpacity:0.0 atTime:time];