
This is what I tried so far for the configuration of the camera:

    AVCaptureSession *session = [[AVCaptureSession alloc] init];
    [session setSessionPreset:AVCaptureSessionPresetInputPriority];

    AVCaptureDevice *videoDevice = [AVCamViewController deviceWithMediaType:AVMediaTypeVideo preferringPosition:AVCaptureDevicePositionBack];

    NSError *errorVideo;

    AVCaptureDeviceFormat *deviceFormat = nil;
    for (AVCaptureDeviceFormat *format in videoDevice.formats) {
        CMVideoDimensions dim = CMVideoFormatDescriptionGetDimensions(format.formatDescription);

        if (dim.width == 2592 && dim.height == 1936) {
            deviceFormat = format;
            break;
        }
    }

    // Setting activeFormat requires holding the configuration lock
    if ([videoDevice lockForConfiguration:&errorVideo]) {
        if (deviceFormat) {
            videoDevice.activeFormat = deviceFormat;

            if ([videoDevice isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) {
                [videoDevice setExposureMode:AVCaptureExposureModeContinuousAutoExposure];
            }

            if ([videoDevice isAutoFocusRangeRestrictionSupported]) {
                [videoDevice setAutoFocusRangeRestriction:AVCaptureAutoFocusRangeRestrictionFar];
            }
        }
        [videoDevice unlockForConfiguration];
    }

    AVCaptureDeviceInput *videoDeviceInput = [AVCaptureDeviceInput deviceInputWithDevice:videoDevice error:&errorVideo];

    if ([session canAddInput:videoDeviceInput]) {
        [session addInput:videoDeviceInput];
    }

    AVCaptureStillImageOutput *stillImageOutput = [[AVCaptureStillImageOutput alloc] init];

    if ([session canAddOutput:stillImageOutput]) {
        [stillImageOutput setOutputSettings:@{(id)kCVPixelBufferPixelFormatTypeKey:@(kCVPixelFormatType_32BGRA)}];
        [session addOutput:stillImageOutput];
    }

This is what I tried for getting the UIImage from the CMSampleBuffer:

    [[self stillImageOutput] captureStillImageAsynchronouslyFromConnection:connection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
        if (imageDataSampleBuffer && !error) {
            dispatch_async(dispatch_get_main_queue(), ^{
                UIImage *image = [self imageFromSampleBuffer:imageDataSampleBuffer];
            });
        }
    }];

This is Apple's example code:

    - (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer {
        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);

        CVPixelBufferLockBaseAddress(imageBuffer, 0);

        void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);

        // Get the number of bytes per row for the pixel buffer
        size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
        // Get the pixel buffer width and height
        size_t width = CVPixelBufferGetWidth(imageBuffer);
        size_t height = CVPixelBufferGetHeight(imageBuffer);

        // Create a device-dependent RGB color space
        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();

        // Create a bitmap graphics context with the sample buffer data
        CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow,
                                                     colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
        // Create a Quartz image from the pixel data in the bitmap graphics context
        CGImageRef quartzImage = CGBitmapContextCreateImage(context);
        // Unlock the pixel buffer
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

        // Free up the context and color space
        CGContextRelease(context);
        CGColorSpaceRelease(colorSpace);

        // Create an image object from the Quartz image
        UIImage *image = [UIImage imageWithCGImage:quartzImage];

        // Release the Quartz image
        CGImageRelease(quartzImage);

        return image;
    }

But the image is always nil. After some debugging I found that CMSampleBufferGetImageBuffer(sampleBuffer) always returns NULL.
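
For what it's worth, one way to check what the completion handler actually receives is to log the sample buffer's media subtype (a minimal diagnostic sketch, placed inside the handler above):

    CMFormatDescriptionRef desc = CMSampleBufferGetFormatDescription(imageDataSampleBuffer);
    FourCharCode subType = CMFormatDescriptionGetMediaSubType(desc);
    // 'jpeg' (kCMVideoCodecType_JPEG) means the buffer is JPEG-encoded and has no
    // pixel buffer; 'BGRA' (kCVPixelFormatType_32BGRA) means it carries an
    // uncompressed CVPixelBuffer that CMSampleBufferGetImageBuffer can return.
    NSLog(@"media subtype: %c%c%c%c",
          (char)(subType >> 24), (char)(subType >> 16),
          (char)(subType >> 8), (char)subType);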

Can anyone help?


1 Answer


This is because the CMSampleBufferRef must be worked on immediately: it is deallocated very quickly once the completion handler returns, so by the time your dispatch_async block runs on the main queue the buffer may already be gone.

Here is my code for generating the image:

    let connection = imageFileOutput.connectionWithMediaType(AVMediaTypeVideo)

    if connection != nil {
        imageFileOutput.captureStillImageAsynchronouslyFromConnection(connection) { [weak self] (buffer, err) -> Void in
            if CMSampleBufferIsValid(buffer) {
                let imageDataJpeg = self?.imageFromSampleBuffer(buffer)
            } else {
                print(err)
            }
        }
    }

As you can see, I turn it into an image while still inside the scope of the completion handler. Once it is an image, I send it off for processing.
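
For the Objective-C code in the question, the same fix looks roughly like this (a sketch; showImage: is a hypothetical stand-in for whatever consumes the result):

    [[self stillImageOutput] captureStillImageAsynchronouslyFromConnection:connection completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
        if (imageDataSampleBuffer && !error) {
            // Convert while the sample buffer is still valid, i.e. inside the handler
            UIImage *image = [self imageFromSampleBuffer:imageDataSampleBuffer];
            dispatch_async(dispatch_get_main_queue(), ^{
                // Only the finished UIImage crosses to the main queue
                [self showImage:image]; // hypothetical consumer
            });
        }
    }];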

  • Please change the jpegStillImageNSDataRepresentation: call to the method I provided. I know it is not relevant, but for posterity it could be. I specifically said in the title uncompressed. Thanks again! – Bogus Apr 29 '16 at 10:26
  • When you say uncompressed, do you mean you want the highest resolution? If so, you can just change the preset on the session to AVCaptureSessionPresetPhoto. – Sean Lintern Apr 29 '16 at 10:29
  • Apple Docs: _"On iOS, currently the only supported keys are AVVideoCodecKey and kCVPixelBufferPixelFormatTypeKey. The keys are mutually exclusive, only one may be present. The recommended values are kCMVideoCodecType_JPEG for AVVideoCodecKey and kCVPixelFormatType_420YpCbCr8BiPlanarFullRange and kCVPixelFormatType_32BGRA for kCVPixelBufferPixelFormatTypeKey."_ If one wants a raw sample buffer, NOT a JPEG-encoded one, one should use kCVPixelBufferPixelFormatTypeKey; see the sketch below. – Bogus Apr 29 '16 at 10:44
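
A minimal sketch of the two mutually exclusive configurations that quote describes:

    // JPEG-encoded output: CMSampleBufferGetImageBuffer returns NULL here,
    // so use +[AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:] instead
    stillImageOutput.outputSettings = @{ AVVideoCodecKey : AVVideoCodecJPEG };

    // Uncompressed output: the sample buffer carries a CVPixelBuffer,
    // so CMSampleBufferGetImageBuffer works as in the question's code
    stillImageOutput.outputSettings = @{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) };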