
I am using ReplayKit for screen recording and I need to get hold of the video file instead of presenting it in the view controller. I have tried the following:

- (void)stopScreenRecording {
    RPScreenRecorder *sharedRecorder = RPScreenRecorder.sharedRecorder;
    [sharedRecorder stopRecordingWithHandler:^(RPPreviewViewController *previewViewController, NSError *error) {
        if (error) {
            NSLog(@"stopScreenRecording: %@", error.localizedDescription);
        }

        if (previewViewController) {
            previewViewController.previewControllerDelegate = self;
            self.previewViewController = previewViewController;

            // RPPreviewViewController only supports full screen modal presentation.
            //self.previewViewController.modalPresentationStyle = UIModalPresentationFullScreen;

            //[self presentViewController:previewViewController animated:YES completion:nil];

            // "movieURL" is a private RPPreviewViewController property, read here via KVC.
            NSURL *aMovieUrl = [previewViewController valueForKey:@"movieURL"];
            [self writeVideoToAlbum:aMovieUrl];
        }
    }];

}
- (void)writeVideoToAlbum:(NSURL *)assetURL{
    __block PHObjectPlaceholder *placeholder;

    [[PHPhotoLibrary sharedPhotoLibrary] performChanges:^{
        PHAssetChangeRequest* createAssetRequest = [PHAssetChangeRequest creationRequestForAssetFromVideoAtFileURL:assetURL];
        placeholder = [createAssetRequest placeholderForCreatedAsset];

    } completionHandler:^(BOOL success, NSError *error) {
        if (success)
        {
            NSLog(@"Video successfully saved!");

        }
        else
        {
            NSLog(@"%@", error);
        }
    }];
}

But it doesn't seem to work; any suggestions would be much appreciated.
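One thing that can make the save above fail silently is missing photo-library permission. A minimal sketch of an authorization check (the wrapper name saveMovieAtURL: below is illustrative, not from the code above) might look like this, assuming NSPhotoLibraryUsageDescription is set in Info.plist:

#import <Photos/Photos.h>

// Hypothetical wrapper: ask for Photos access before attempting the save.
// The method name and the call into -writeVideoToAlbum: are illustrative assumptions.
- (void)saveMovieAtURL:(NSURL *)movieURL {
    [PHPhotoLibrary requestAuthorization:^(PHAuthorizationStatus status) {
        if (status == PHAuthorizationStatusAuthorized) {
            [self writeVideoToAlbum:movieURL];
        } else {
            NSLog(@"Photo library access denied (status %ld)", (long)status);
        }
    }];
}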

user3411226
1 Answer


You can use the following code to record the screen with ReplayKit; it writes the video into the app's Documents directory, so you end up with the file yourself instead of going through RPPreviewViewController.

#import <ReplayKit/ReplayKit.h>
#import <AVFoundation/AVFoundation.h>

@property (strong, nonatomic) RPScreenRecorder *screenRecorder;
@property (strong, nonatomic) AVAssetWriter *assetWriter;
@property (strong, nonatomic) AVAssetWriterInput *assetWriterInput;

- (IBAction)startScreenRecording:(UIButton *)button {
    self.screenRecorder = [RPScreenRecorder sharedRecorder];
    if (self.screenRecorder.isRecording) {
        return;
    }
    NSError *error = nil;
    NSArray *pathDocuments = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
    NSString *outputURL = pathDocuments[0];

    NSString *videoOutPath = [[outputURL stringByAppendingPathComponent:[NSString stringWithFormat:@"%u", arc4random() % 1000]] stringByAppendingPathExtension:@"mp4"];
    self.assetWriter = [AVAssetWriter assetWriterWithURL:[NSURL fileURLWithPath:videoOutPath] fileType:AVFileTypeMPEG4 error:&error];

    NSDictionary *compressionProperties = @{AVVideoProfileLevelKey         : AVVideoProfileLevelH264HighAutoLevel,
                                            AVVideoH264EntropyModeKey      : AVVideoH264EntropyModeCABAC,
                                            AVVideoAverageBitRateKey       : @(1920 * 1080 * 11.4),
                                            AVVideoMaxKeyFrameIntervalKey  : @60,
                                            AVVideoAllowFrameReorderingKey : @NO};
    NSNumber* width= [NSNumber numberWithFloat:self.view.frame.size.width];
    NSNumber* height = [NSNumber numberWithFloat:self.view.frame.size.height];

    if (@available(iOS 11.0, *)) {
        NSDictionary *videoSettings = @{AVVideoCompressionPropertiesKey : compressionProperties,
                                        AVVideoCodecKey                 : AVVideoCodecTypeH264,
                                        AVVideoWidthKey                 : width,
                                        AVVideoHeightKey                : height};

        self.assetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];
        self.assetWriterInput.expectsMediaDataInRealTime = YES;
        self.assetWriterInput.mediaTimeScale = 60;
        self.assetWriter.movieTimeScale = 60;
        [self.assetWriter addInput:self.assetWriterInput];
    } else {
        // Fallback on earlier versions: startCaptureWithHandler: is iOS 11+ only.
        return;
    }

    if (@available(iOS 11.0, *)) {
        [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo completionHandler:^(BOOL granted) {
            dispatch_async(dispatch_get_main_queue(), ^{
                if (granted)
                {
                    [self.screenRecorder setMicrophoneEnabled:YES];
                    [self.screenRecorder startCaptureWithHandler:^(CMSampleBufferRef  _Nonnull sampleBuffer, RPSampleBufferType bufferType, NSError * _Nullable error) {
                        if (CMSampleBufferDataIsReady(sampleBuffer)) {
                            if (self.assetWriter.status == AVAssetWriterStatusUnknown && bufferType == RPSampleBufferTypeVideo) {
                                [self.assetWriter startWriting];
                                [self.assetWriter startSessionAtSourceTime:CMSampleBufferGetPresentationTimeStamp(sampleBuffer)];
                            }

                            if (self.assetWriter.status == AVAssetWriterStatusFailed) {
                                NSLog(@"An error occurred: %@", self.assetWriter.error);
                                //show alert
                                [[RPScreenRecorder sharedRecorder] stopCaptureWithHandler:^(NSError * _Nullable error) {}];
                                return;
                            }
                            if (bufferType == RPSampleBufferTypeVideo) {
                                if (self.assetWriterInput.isReadyForMoreMediaData) {
                                    [self.assetWriterInput appendSampleBuffer:sampleBuffer];
                                }else{
                                    NSLog(@"Not ready for video");
                                }
                            }
                        }
                    } completionHandler:^(NSError * _Nullable error) {
                        if (!error) {
                            AVAudioSession *session = [AVAudioSession sharedInstance];
                            [session setActive:YES error:nil];
                            // Start recording
                            NSLog(@"Recording started successfully.");
                        }else{
                            //show alert
                        }
                    }];
                }
            });
        }];


    } else {
        // Fallback on earlier versions
    }

} 
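Note that the code above turns the microphone on but only ever appends video sample buffers, so the resulting file has no audio track. If audio is wanted as well, fragments along these lines could be merged into the same methods; the audioWriterInput property and the AAC settings are assumptions for illustration, not part of the original answer.

// Sketch only: extra pieces to also write microphone audio (assumed names).
@property (strong, nonatomic) AVAssetWriterInput *audioWriterInput;

// When configuring the writer, alongside the video input:
NSDictionary *audioSettings = @{AVFormatIDKey         : @(kAudioFormatMPEG4AAC),
                                AVNumberOfChannelsKey : @1,
                                AVSampleRateKey       : @44100.0};
self.audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio
                                                           outputSettings:audioSettings];
self.audioWriterInput.expectsMediaDataInRealTime = YES;
[self.assetWriter addInput:self.audioWriterInput];

// Inside the capture handler, after the video branch:
if (bufferType == RPSampleBufferTypeAudioMic &&
    self.assetWriter.status == AVAssetWriterStatusWriting &&
    self.audioWriterInput.isReadyForMoreMediaData) {
    [self.audioWriterInput appendSampleBuffer:sampleBuffer];
}

// And in the stop handler, mark the audio input finished before finishWriting:
[self.audioWriterInput markAsFinished];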

- (IBAction)stopScreenRecording:(UIButton *)button {

    if (@available(iOS 11.0, *)) {
        dispatch_async(dispatch_get_main_queue(), ^{
            [[RPScreenRecorder sharedRecorder] stopCaptureWithHandler:^(NSError * _Nullable error) {
                if (!error) {
                    NSLog(@"Recording stopped successfully. Cleaning up...");
                    [self.assetWriterInput markAsFinished];
                    [self.assetWriter finishWritingWithCompletionHandler:^{
                        NSLog(@"File Url:  %@",self.assetWriter.outputURL);
                        self.assetWriterInput = nil;
                        self.assetWriter = nil;
                        self.screenRecorder = nil;
                    }];
                }
            }];
        });


    } else {
        // Fallback on earlier versions: stopCaptureWithHandler: is iOS 11+ only.
    }
}
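If the goal from the question is to end up with the recording in the photo library rather than only in Documents, the finishWritingWithCompletionHandler: block above is the natural place to hand the file over. A hedged variant, assuming the writeVideoToAlbum: method from the question exists on the same class and Photos access has been granted:

[self.assetWriter finishWritingWithCompletionHandler:^{
    NSURL *finishedURL = self.assetWriter.outputURL;
    NSLog(@"File URL: %@", finishedURL);
    [self writeVideoToAlbum:finishedURL];   // from the question; assumed to be on this class
    self.assetWriterInput = nil;
    self.assetWriter = nil;
    self.screenRecorder = nil;
}];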
Rahul_Chandnani
  • Seems like a good solution, but what are telestrationAudioRecorder and unhideButtonsAfterTelestration? – user3411226 Nov 20 '17 at 13:09
  • There was some functionality in my code just for recording audio separately, nothing related to this question; I removed those lines. Thanks! – Rahul_Chandnani Nov 20 '17 at 13:15
  • 1
    already have, and when running the code i got the following error: [AVAssetWriterInput appendSampleBuffer:] Media type of sample buffer must match receiver's media type ("vide")' – user3411226 Nov 20 '17 at 13:16
  • Oops, I missed removing the OR condition; please use the updated answer now: if (bufferType == RPSampleBufferTypeVideo) only. – Rahul_Chandnani Nov 20 '17 at 13:21
  • Most Welcome :-) – Rahul_Chandnani Nov 20 '17 at 14:17
  • Can this be customized to point the recorder at a certain view in my app, in case I only want to record that part of my screen? – user3411226 Nov 20 '17 at 14:25
  • To record a particular view there are many public repos on GitHub. Or, after creating a video with ReplayKit, you can crop the video to your requirement (a rough sketch of that follows after these comments); I'm doing the same. – Rahul_Chandnani Nov 20 '17 at 16:51
  • Do you have the same code in Swift 3 or 4, please? It seems the Swift version has no startCapture function – Ghiggz Pikkoro Jan 05 '18 at 13:15
  • let videoSettings = [AVVideoCompressionPropertiesKey: compressionProperties, AVVideoCodecKey: .h264, AVVideoWidthKey: width, AVVideoHeightKey: height] // Error: Type of expression is ambiguous without more context self.screenRecorder.startCapture(handler: {(_ sampleBuffer: CMSampleBuffer?, _ bufferType: RPSampleBufferType, _ error: Error?) -> Void in // Value of type 'RPScreenRecorder?' has no member 'startCapture' – Ghiggz Pikkoro Jan 05 '18 at 13:18
  • @Rahul_Chandnani: Are you able to get correct frames? I did the same as you mentioned; I am doing some drawing on screen and the frames seem to be flickery in the output video. – Purushottam Sain Mar 20 '18 at 13:54
  • @PurushottamSain: Yes, I'm getting correct frames. I'm also drawing on the screen. – Rahul_Chandnani Mar 21 '18 at 12:28
  • So any suggestions? – Purushottam Sain Mar 21 '18 at 12:41
  • Share your output video via any drive link; after seeing your resultant video and code I can help you. @PurushottamSain – Rahul_Chandnani Mar 21 '18 at 12:47
  • Thanks, will do today. – Purushottam Sain Mar 22 '18 at 10:23
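As a rough illustration of the cropping idea mentioned in the comments (everything here is an assumption rather than code from the answer: the method name, the output path, and the translation-only crop), one way to export a cropped copy with AVFoundation could look like this:

// Hypothetical helper: export a cropped copy of a recorded movie.
// cropRect is expected in the pixel coordinates of the source video track.
- (void)exportCroppedVideoAtURL:(NSURL *)sourceURL
                         toRect:(CGRect)cropRect
                     completion:(void (^)(NSURL *outputURL, NSError *error))completion {
    AVURLAsset *asset = [AVURLAsset URLAssetWithURL:sourceURL options:nil];
    AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];

    AVMutableVideoComposition *composition = [AVMutableVideoComposition videoComposition];
    composition.renderSize = cropRect.size;
    composition.frameDuration = CMTimeMake(1, 60);

    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, asset.duration);

    AVMutableVideoCompositionLayerInstruction *layerInstruction =
        [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:videoTrack];
    // Shift the frame so the crop rectangle's origin lands at (0, 0).
    [layerInstruction setTransform:CGAffineTransformMakeTranslation(-cropRect.origin.x, -cropRect.origin.y)
                            atTime:kCMTimeZero];
    instruction.layerInstructions = @[layerInstruction];
    composition.instructions = @[instruction];

    NSString *outPath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"cropped.mp4"];
    [[NSFileManager defaultManager] removeItemAtPath:outPath error:nil];

    AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:asset
                                                                      presetName:AVAssetExportPresetHighestQuality];
    exporter.videoComposition = composition;
    exporter.outputURL = [NSURL fileURLWithPath:outPath];
    exporter.outputFileType = AVFileTypeMPEG4;

    [exporter exportAsynchronouslyWithCompletionHandler:^{
        completion(exporter.outputURL, exporter.error);
    }];
}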