
My goal is to mirror the screen of an iDevice to OS X, as lag-free as possible.

To my knowledge there are two ways to do this:

  1. Airplay Mirroring (e.g. Reflector)
  2. CoreMediaIO via Lightning (e.g. QuickTime recording)

I have chosen to pursue the second method, because (to my knowledge) connected iDevices can be recognized as DAL devices automatically after a one-time setup.

The main resource on how to do this is this blog: https://nadavrub.wordpress.com/2015/07/06/macos-media-capture-using-coremediaio/

That blog goes very deep into how to use CoreMediaIO directly; however, it seems you can work with plain AVFoundation once the connected iDevice is recognized as an AVCaptureDevice.

This question, How to mirror iOS screen via USB?, has a posted solution for grabbing each frame of the H.264 (Annex B) muxed data stream supplied by the iDevice.
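
For reference, the frame-grabbing part of that solution boils down to something like the following sketch (my paraphrase, assuming the muxed AVCaptureVideoDataOutput delivers the raw Annex B elementary stream in the sample buffer's block buffer, as described there):

- (void)captureOutput:(AVCaptureOutput *)output
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection {
    // The muxed stream arrives as a data (block) buffer, not a pixel buffer.
    CMBlockBufferRef blockBuffer = CMSampleBufferGetDataBuffer(sampleBuffer);
    if (blockBuffer == NULL) {
        return;
    }

    size_t totalLength = 0;
    char *data = NULL;
    if (CMBlockBufferGetDataPointer(blockBuffer, 0, NULL, &totalLength, &data) == kCMBlockBufferNoErr) {
        // data now points at totalLength bytes of start-code-delimited
        // (Annex B) H.264 NAL units, ready for parsing.
    }
}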

However, my problem is that VideoToolbox will not decode the resulting frames (error code -8969, BadData), even though there shouldn't be any difference between my code and the linked solution.

vtDecompressionDuctDecodeSingleFrame signalled err=-8969 (err) (VTVideoDecoderDecodeFrame returned error) at /SourceCache/CoreMedia_frameworks/CoreMedia-1562.240/Sources/VideoToolbox/VTDecompressionSession.c line 3241
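
The failing call is VTDecompressionSessionDecodeFrame, and -8969 is kVTVideoDecoderBadDataErr, i.e. the decoder considers the bitstream it was handed malformed. Roughly (a sketch; creating the decompression session from the stream's SPS/PPS is omitted, and session/sampleBuffer are placeholders):

// session is a VTDecompressionSessionRef created earlier; sampleBuffer
// wraps one H.264 frame pulled from the muxed stream.
VTDecodeInfoFlags infoFlags = 0;
OSStatus status = VTDecompressionSessionDecodeFrame(session,
                                                    sampleBuffer,
                                                    kVTDecodeFrame_EnableAsynchronousDecompression,
                                                    NULL,
                                                    &infoFlags);
if (status != noErr) {
    // -8969 (kVTVideoDecoderBadDataErr) lands here.
    NSLog(@"VTDecompressionSessionDecodeFrame failed: %d", (int)status);
}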

Complete Code:

#import "ViewController.h"

@import CoreMediaIO;
@import AVFoundation;
@import CoreImage;
@import AppKit;

@implementation ViewController

AVCaptureSession *session;
AVCaptureDeviceInput *newVideoDeviceInput;
AVCaptureVideoDataOutput *videoDataOutput;

- (void)viewDidLoad {
    [super viewDidLoad];
}

- (instancetype)initWithCoder:(NSCoder *)coder
{
    self = [super initWithCoder:coder];
    if (self) {
        // Allow iOS Devices Discovery
        CMIOObjectPropertyAddress prop =
        { kCMIOHardwarePropertyAllowScreenCaptureDevices,
            kCMIOObjectPropertyScopeGlobal,
            kCMIOObjectPropertyElementMaster };
        UInt32 allow = 1;
        CMIOObjectSetPropertyData( kCMIOObjectSystemObject,
                                  &prop, 0, NULL,
                                  sizeof(allow), &allow );

        // Get devices
        NSArray *devices = [AVCaptureDevice devicesWithMediaType:AVMediaTypeMuxed];
        BOOL deviceAttached = NO;
        for (AVCaptureDevice *device in devices) {
            if ([[device uniqueID] isEqualToString:@"b48defcadf92f300baf5821923f7b3e2e9fb3947"]) {
                deviceAttached = YES;
                [self startSession:device];
                break;
            }
        }

    }
    return self;
}

- (void) deviceConnected:(AVCaptureDevice *)device {
    if ([[device uniqueID] isEqualToString:@"b48defcadf92f300baf5821923f7b3e2e9fb3947"]) {
        [self startSession:device];
    }
}

- (void) startSession:(AVCaptureDevice *)device {

    // Init capturing session
    session = [[AVCaptureSession alloc] init];

    // Start session configuration
    [session beginConfiguration];

    // Add session input
    NSError *error;
    newVideoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (newVideoDeviceInput == nil) {
        dispatch_async(dispatch_get_main_queue(), ^(void) {
            NSLog(@"%@", error);
        });
    } else {
        [session addInput:newVideoDeviceInput];
    }

    // Add session output
    videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
    videoDataOutput.videoSettings = @{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) };

    dispatch_queue_t videoQueue = dispatch_queue_create("videoQueue", NULL);

    [videoDataOutput setSampleBufferDelegate:self queue:videoQueue];
    [session addOutput:videoDataOutput];

    // Finish session configuration
    [session commitConfiguration];

    // Start the session
    [session startRunning];
}

#pragma mark - AVCaptureVideoDataOutputSampleBufferDelegate

static NSImage *nsImageFromSampleBuffer(CMSampleBufferRef source);

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    NSImage *image = nsImageFromSampleBuffer(sampleBuffer);

    // The delegate fires on videoQueue; UI updates belong on the main thread.
    dispatch_async(dispatch_get_main_queue(), ^{
        self.imageView.image = image;
    });
}

// Convert a BGRA pixel buffer into an NSImage. Raw pixel bytes are not a
// serialized image format, so -[NSImage initWithData:] would return nil;
// go through CIImage instead.
static NSImage *nsImageFromSampleBuffer(CMSampleBufferRef source) {
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(source);
    if (imageBuffer == NULL) {
        return nil;
    }

    CIImage *ciImage = [CIImage imageWithCVImageBuffer:imageBuffer];
    NSCIImageRep *rep = [NSCIImageRep imageRepWithCIImage:ciImage];
    NSImage *image = [[NSImage alloc] initWithSize:rep.size];
    [image addRepresentation:rep];
    return image;
}

@end

1 Answer


No, you must remove the Annex B start codes and replace them with size values, i.e. a length prefix per NAL unit. It is the same format as MP4 (AVCC).
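
In other words, VideoToolbox expects AVCC-style length-prefixed NAL units rather than the raw Annex B stream, and the SPS/PPS are used to build the format description instead of being fed to the decoder as data. A minimal sketch of both steps, assuming the start codes have already been located and stripped (the helper names here are illustrative, not from the linked code):

// Hypothetical helper: wrap one Annex B NAL unit payload (start code
// already stripped) in an AVCC-style 4-byte big-endian length prefix.
static NSData *avccFromNalUnit(const uint8_t *nal, size_t nalLength) {
    NSMutableData *avcc = [NSMutableData dataWithCapacity:nalLength + 4];
    uint32_t lengthBE = CFSwapInt32HostToBig((uint32_t)nalLength);
    [avcc appendBytes:&lengthBE length:sizeof(lengthBE)];
    [avcc appendBytes:nal length:nalLength];
    return avcc;
}

// Hypothetical helper: SPS and PPS NAL units are not decoded as frames;
// they create the CMVideoFormatDescription the decompression session
// is built from.
static CMVideoFormatDescriptionRef formatFromParameterSets(
        const uint8_t *sps, size_t spsLength,
        const uint8_t *pps, size_t ppsLength) {
    const uint8_t *paramSets[2] = { sps, pps };
    const size_t paramSizes[2] = { spsLength, ppsLength };
    CMVideoFormatDescriptionRef format = NULL;
    CMVideoFormatDescriptionCreateFromH264ParameterSets(
        kCFAllocatorDefault, 2, paramSets, paramSizes,
        4 /* NAL length prefix size, matching avccFromNalUnit */, &format);
    return format;
}

With the stream rewritten this way, each sample buffer handed to VTDecompressionSessionDecodeFrame carries the layout the decoder expects, which is exactly what the -8969 BadData error is complaining about.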
