
I know this question has been asked a few times in the past and I have read the responses, but nothing works the way I want. I have multiple videos, all added to the queue of an AVQueuePlayer.

I have tried adding them in two ways, as mentioned in other answers:

AVPlayerItem *item1 = [AVPlayerItem playerItemWithURL:url1];
AVPlayerItem *item2 = [AVPlayerItem playerItemWithURL:url2];

NSArray *playerItems = [[NSArray alloc] initWithObjects:item1, item2, nil];
avPlayer = [[AVQueuePlayer alloc] initWithItems:playerItems]; 

And this way:

    avPlayer = [[AVQueuePlayer alloc] init];

    AVURLAsset *asset1 = [[AVURLAsset alloc] initWithURL:url1 options:nil];
    NSArray *keys = [NSArray arrayWithObject:@"playable"];

    [asset1 loadValuesAsynchronouslyForKeys:keys completionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{
            AVPlayerItem *playerItem = [[AVPlayerItem alloc] initWithAsset:asset1];
            [avPlayer insertItem:playerItem afterItem:nil];
        });
    }];

    AVURLAsset *asset2 = [[AVURLAsset alloc] initWithURL:url2 options:nil];
    [asset2 loadValuesAsynchronouslyForKeys:keys completionHandler:^{
        dispatch_async(dispatch_get_main_queue(), ^{
            AVPlayerItem *playerItem = [[AVPlayerItem alloc] initWithAsset:asset2];
            [avPlayer insertItem:playerItem afterItem:nil];
        });
    }];

But neither approach removes the black screen while advancing to the next item. There is a gap of around one second before the next item starts playing. How can I remove this gap?

UPDATE: I also tried AVMutableComposition. The gaps are reduced significantly but are still noticeable. Is there ANY way to remove them completely?

AVMutableComposition code:

AVMutableComposition* mixComposition = [[AVMutableComposition alloc] init];

NSMutableArray *arrayInstruction = [[NSMutableArray alloc] init];

AVMutableVideoCompositionInstruction * MainInstruction =
[AVMutableVideoCompositionInstruction videoCompositionInstruction];
AVMutableCompositionTrack *audioTrack;

audioTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio
                                         preferredTrackID:kCMPersistentTrackID_Invalid];

CMTime duration = kCMTimeZero;

for(int i = 0; i <= 5; i++)
{
    AVAsset *currentAsset;
    currentAsset = [self currentAsset:i]; // fetch the i-th recorded asset
        AVMutableCompositionTrack *currentTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
        [currentTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, currentAsset.duration) ofTrack:[[currentAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] atTime:duration error:nil];

        [audioTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, currentAsset.duration) ofTrack:[[currentAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0] atTime:duration error:nil];

        AVMutableVideoCompositionLayerInstruction *currentAssetLayerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:currentTrack];
        AVAssetTrack *currentAssetTrack = [[currentAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
        ALAssetOrientation currentAssetOrientation  = ALAssetOrientationUp;
        BOOL  isCurrentAssetPortrait  = YES;
        CGAffineTransform currentTransform = currentAssetTrack.preferredTransform;

        if(currentTransform.a == 0 && currentTransform.b == 1.0 && currentTransform.c == -1.0 && currentTransform.d == 0)  {currentAssetOrientation= ALAssetOrientationRight; isCurrentAssetPortrait = YES;}
        if(currentTransform.a == 0 && currentTransform.b == -1.0 && currentTransform.c == 1.0 && currentTransform.d == 0)  {currentAssetOrientation =  ALAssetOrientationLeft; isCurrentAssetPortrait = YES;}
        if(currentTransform.a == 1.0 && currentTransform.b == 0 && currentTransform.c == 0 && currentTransform.d == 1.0)   {currentAssetOrientation =  ALAssetOrientationUp;}
        if(currentTransform.a == -1.0 && currentTransform.b == 0 && currentTransform.c == 0 && currentTransform.d == -1.0) {currentAssetOrientation = ALAssetOrientationDown;}

        CGFloat FirstAssetScaleToFitRatio = 640.0/640.0;
        if(isCurrentAssetPortrait){
            FirstAssetScaleToFitRatio = 640.0/640.0;
            CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
            [currentAssetLayerInstruction setTransform:CGAffineTransformConcat(currentAssetTrack.preferredTransform, FirstAssetScaleFactor) atTime:duration];
        }else{
            CGAffineTransform FirstAssetScaleFactor = CGAffineTransformMakeScale(FirstAssetScaleToFitRatio,FirstAssetScaleToFitRatio);
            [currentAssetLayerInstruction setTransform:CGAffineTransformConcat(CGAffineTransformConcat(currentAssetTrack.preferredTransform, FirstAssetScaleFactor),CGAffineTransformMakeTranslation(0, 0)) atTime:duration];
        }
        duration=CMTimeAdd(duration, currentAsset.duration);
        [arrayInstruction addObject:currentAssetLayerInstruction];
}

MainInstruction.timeRange = CMTimeRangeMake(kCMTimeZero, duration);
MainInstruction.layerInstructions = arrayInstruction;
AVMutableVideoComposition *MainCompositionInst = [AVMutableVideoComposition videoComposition];

MainCompositionInst.instructions = [NSArray arrayWithObject:MainInstruction];
MainCompositionInst.frameDuration = CMTimeMake(1, 30);
MainCompositionInst.renderSize = CGSizeMake(640.0, 640.0);

NSString *filename = @"mergedVideo.mp4";
pathForFile = [NSTemporaryDirectory() stringByAppendingPathComponent:filename];
NSFileManager *fileManager = [NSFileManager defaultManager];
BOOL deleted = [fileManager removeItemAtPath:pathForFile error:NULL];
NSLog(@"Deleted old output file: %d", deleted);

NSURL *url = [NSURL fileURLWithPath:pathForFile];
NSLog(@"Output URL: %@", url);
NSError *err;
if ([url checkResourceIsReachableAndReturnError:&err] == NO)
    NSLog(@"No file at output URL (as expected before export)");
else
    NSLog(@"A file still exists at the output URL; the export will fail");

AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetHighestQuality];
exporter.outputURL=url;
exporter.outputFileType = AVFileTypeMPEG4; // match the .mp4 extension of the output path
exporter.videoComposition = MainCompositionInst;
exporter.shouldOptimizeForNetworkUse = YES;
[exporter exportAsynchronouslyWithCompletionHandler:^
 {
     switch (exporter.status)
     {
         case AVAssetExportSessionStatusCompleted:
         {
             NSURL *outputURL = exporter.outputURL;

             ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
             if ([library videoAtPathIsCompatibleWithSavedPhotosAlbum:outputURL]) {

                 [library writeVideoAtPathToSavedPhotosAlbum:outputURL completionBlock:^(NSURL *assetURL, NSError *error)
                  {
                      NSLog(@"Asset URL %@", assetURL);
                      if (error)
                      {
                          NSLog(@"Error %@", error);
                      } else {
                          NSLog(@"Video saved");
                      }
                  }];
                 NSLog(@"Video merge successful");
                 currentFile++;
             }
         }
             break;
         case AVAssetExportSessionStatusFailed:
             NSLog(@"Failed:%@", exporter.error.description);
             break;
         case AVAssetExportSessionStatusCancelled:
             NSLog(@"Canceled:%@", exporter.error);
             break;
         case AVAssetExportSessionStatusExporting:
             NSLog(@"Exporting!");
             break;
         case AVAssetExportSessionStatusWaiting:
             NSLog(@"Waiting");
             break;
         default:
             break;
     }
 }];
blancos
  • I think the first solution is better, but where is this code, in viewDidLoad? Please post your complete code here. – Acácio Veit Schneider Mar 18 '15 at 14:44
  • @AcácioVeitSchneider AVQueuePlayer seems like a bad idea altogether; with both approaches there is a gap of about 1 to 1.5 seconds while changing tracks. It is in a method called from viewDidLoad. – blancos Mar 21 '15 at 06:03
  • Try mixComposition.duration in place of your duration. – Abhi Mar 25 '15 at 10:22

1 Answer


For Ultravisual we used AVMutableComposition, and as long as we built up the composition first and then built a player to play it, we were able to get flawless gap-free playback everywhere except when looping.
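
A minimal sketch of that setup, reusing the `mixComposition` and `MainCompositionInst` names from the question's code (playing the composition in place rather than exporting first):

    // Build the composition completely, then hand it to a player.
    AVPlayerItem *playerItem = [AVPlayerItem playerItemWithAsset:mixComposition];
    playerItem.videoComposition = MainCompositionInst; // apply the layer instructions during playback
    AVPlayer *player = [AVPlayer playerWithPlayerItem:playerItem];

    AVPlayerLayer *playerLayer = [AVPlayerLayer playerLayerWithPlayer:player];
    playerLayer.frame = self.view.bounds;
    [self.view.layer addSublayer:playerLayer];
    [player play];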

Can you walk through all the tracks in your AVMutableComposition and verify there are no gaps? Don't forget the audio tracks. Sometimes audio and video have different timestamps - you may need to add another track to your AVMutableComposition to get around this.
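
A rough sketch of that walk-through, assuming the `mixComposition` from the question (print each segment's target range so any holes between consecutive segments show up in the log):

    // Dump every segment of every track; gaps appear as jumps between one end time and the next start time.
    for (AVCompositionTrack *track in mixComposition.tracks) {
        NSLog(@"Track %d (%@)", (int)track.trackID, track.mediaType);
        for (AVCompositionTrackSegment *segment in track.segments) {
            CMTimeRange target = segment.timeMapping.target; // where this segment sits in the composition
            NSLog(@"  segment start %.6f  end %.6f  empty %d",
                  CMTimeGetSeconds(target.start),
                  CMTimeGetSeconds(CMTimeRangeGetEnd(target)),
                  segment.isEmpty);
        }
    }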

damian
  • I posted my code for merging. I would appreciate it if you could tell me what is wrong. – blancos Mar 25 '15 at 09:33
  • I can't tell by looking at the code. Walk through all the `AVCompositionTrack`s in `mixComposition.tracks`, for each `track` walk through all the `AVCompositionTrackSegments` via `track.segments` and print its start and end time to the debug console. Copy to a text editor, sort by time, eyeball until you can see the gaps in the numbers. It might help to differentiate tracks by whether they're video or audio. – damian Mar 25 '15 at 10:15
  • I don't see any property in AVCompositionTrackSegment to print timestamps. Can you elaborate on these properties and their names? – blancos Mar 26 '15 at 12:20
  • The property you want is `timeMapping`. It's defined in `AVAssetTrackSegment`, which is the superclass of `AVCompositionTrackSegment`. The Apple docs are very good for all these classes and will tell you everything you need. – damian Mar 26 '15 at 13:02
  • I printed logs but I am pretty confused. How many tracks are supposed to be there? Example: I recorded 5 videos. Track 0 had 5 segments with one video URL each. Up to here it is clear. But then there are Track 1 and Track 2 with a random number of segments every time, and most of the segment URLs are null. – blancos Mar 27 '15 at 10:42
  • You loop 5 times, you say `addMutableTrackWithMediaType:AVMediaTypeVideo` every time, and you also have 1 audio track. Therefore there should in theory be 6 tracks. However, `AVComposition` may be doing some optimisation behind your back (merging compatible tracks together) -- you should really use `mutableTrackCompatibleWithTrack` to find out if you already have a track that is compatible with `currentAsset`'s video track. As I mentioned above, it might help if you print out for each track if it is video or audio. – damian Mar 27 '15 at 13:02
  • Bonus thoughts. If track 0 is video and 1 and 2 are audio, then you can determine where the black gaps are coming from by looking at the start and end timestamps for each segment on track 0. If there are gaps between `CMTimeRangeGetEnd(segment[i].timeMapping.target)` and `segment[i+1].timeMapping.target.start`, then that's where your black frames are coming from. – damian Mar 27 '15 at 13:10
  • OK, I analysed it as you said and the timestamps are absolutely correct (up to 6 decimal places). But the blink is still there on playback. Do I have to do something special during playback in AVPlayer? – blancos Mar 30 '15 at 06:41
  • Check the first and last frames in the source video, maybe you have black frames at the beginning/end. Open them up in QuickTime Player 7 (not any newer version) and walk through the frames using the arrow keys, and/or check the properties, looking at the timelines of the audio vs the video track – damian Mar 30 '15 at 09:48
  • Checked. No black frames were visible in QuickTime Player. – blancos Mar 30 '15 at 10:12
  • I just noticed that you're creating an `AVVideoComposition` yourself. What happens if you use `AVMutableVideoComposition *mainCompositionInst = [AVMutableVideoComposition videoCompositionWithPropertiesOfAsset:mixComposition]`? – damian Mar 30 '15 at 11:12
  • I used it to assign the exporter's videoComposition property, with the instructions, frameDuration, etc. – blancos Mar 30 '15 at 11:40
  • Right - how do you know that the frameDuration is correct? If you have 60fps (or 24fps) source material and 30fps `frameDuration`, that might cause black frames. – damian Mar 30 '15 at 11:45
  • OK, so what should I set it to? How do I get the frameDuration of the source material? – blancos Mar 30 '15 at 11:51
  • Let us [continue this discussion in chat](http://chat.stackoverflow.com/rooms/74178/discussion-between-damian-and-blancos). – damian Mar 31 '15 at 09:15
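
Following up on the frameDuration point in the last comments: a minimal, hedged sketch of deriving the frame duration from the source material instead of hard-coding 30 fps (assumes the first clip is representative of the rest, and reuses `currentAsset:` and `MainCompositionInst` from the question's code):

    // Read the source track's nominal frame rate and use it as the composition's frame duration.
    AVAssetTrack *sourceVideoTrack = [[[self currentAsset:0] tracksWithMediaType:AVMediaTypeVideo] firstObject];
    float fps = sourceVideoTrack.nominalFrameRate; // e.g. 24, 29.97, 30, 60
    if (fps > 0) {
        MainCompositionInst.frameDuration = CMTimeMake(1, (int32_t)roundf(fps));
    } else {
        MainCompositionInst.frameDuration = CMTimeMake(1, 30); // fall back to 30 fps if the rate is unknown
    }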