
On macOS I use AVFoundation to capture an image from a USB camera. Everything works fine, but the image I get is darker compared to the live video.

Device capture configuration

-(BOOL)prepareCapture{
    captureSession = [[AVCaptureSession alloc] init];
    NSError *error;

    imageOutput = [[AVCaptureStillImageOutput alloc] init];
    // k32BGRAPixelFormat is the legacy QuickTime spelling of kCVPixelFormatType_32BGRA ('BGRA')
    NSNumber *pixelFormat = [NSNumber numberWithInt:k32BGRAPixelFormat];
    [imageOutput setOutputSettings:[NSDictionary dictionaryWithObject:pixelFormat
                                                                forKey:(id)kCVPixelBufferPixelFormatTypeKey]];

    videoOutput = [[AVCaptureMovieFileOutput alloc] init];

    AVCaptureDeviceInput *videoInput = [AVCaptureDeviceInput deviceInputWithDevice:MyVideoDevice error:&error];
    if (videoInput) {
        [captureSession beginConfiguration];
        [captureSession addInput:videoInput];
        [captureSession setSessionPreset:AVCaptureSessionPresetHigh];
        //[captureSession setSessionPreset:AVCaptureSessionPresetPhoto];
        [captureSession addOutput:imageOutput];
        [captureSession addOutput:videoOutput];
        [captureSession commitConfiguration];
    }
    else {
        // Handle the failure.
        return NO;
    }
    return YES;
}

Add view for live preview

-(void)settingPreview:(NSView *)view{
    // Attach preview to the session
    previewView = view;
    CALayer *previewViewLayer = [previewView layer];
    [previewViewLayer setBackgroundColor:CGColorGetConstantColor(kCGColorBlack)];
    AVCaptureVideoPreviewLayer *newPreviewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:captureSession];
    [newPreviewLayer setFrame:[previewViewLayer bounds]];
    [newPreviewLayer setAutoresizingMask:kCALayerWidthSizable | kCALayerHeightSizable];
    [previewViewLayer addSublayer:newPreviewLayer];
    //[self setPreviewLayer:newPreviewLayer];
    [captureSession startRunning];
}

Code to capture the image

-(void)captureImage{
    AVCaptureConnection *videoConnection = nil;
    for (AVCaptureConnection *connection in imageOutput.connections) {
        for (AVCaptureInputPort *port in [connection inputPorts]) {
            if ([[port mediaType] isEqual:AVMediaTypeVideo]) {
                videoConnection = connection;
                break;
            }
        }
        if (videoConnection) { break; }
    }
    [imageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:
     ^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
         CFDictionaryRef exifAttachments =
             CMGetAttachment(imageSampleBuffer, kCGImagePropertyExifDictionary, NULL);
         if (exifAttachments) {
             // Do something with the attachments.
         }
         // Continue as appropriate.
         // IMG is a global NSImage
         IMG = [self imageFromSampleBuffer:imageSampleBuffer];
         [[self delegate] imageReady:IMG];
     }];
}

Create an NSImage from the sample buffer data; I think the problem is here

- (NSImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    // Get the CMSampleBuffer's Core Video image buffer for the media data
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    // Lock the base address of the pixel buffer
    CVPixelBufferLockBaseAddress(imageBuffer, 0);

    // Get the base address of the pixel buffer
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer);

    // Get the number of bytes per row for the pixel buffer
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    // Get the pixel buffer width and height
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);

    // Create a device-dependent RGB color space
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();

    // Create a bitmap graphics context with the sample buffer data
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
                                                 bytesPerRow, colorSpace,
                                                 kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    // Create a Quartz image from the pixel data in the bitmap graphics context
    CGImageRef quartzImage = CGBitmapContextCreateImage(context);
    // Unlock the pixel buffer
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);

    // Free up the context and color space
    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);

    // Create an image object from the Quartz image
    //UIImage *image = [UIImage imageWithCGImage:quartzImage];
    NSImage *image = [[NSImage alloc] initWithCGImage:quartzImage size:NSZeroSize];
    // Release the Quartz image
    CGImageRelease(quartzImage);

    return image;
}
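
One plausible culprit, offered as an assumption rather than a confirmed diagnosis: CGColorSpaceCreateDeviceRGB() ignores the color space the camera actually tagged the pixel buffer with, which can shift the apparent brightness/gamma. A minimal, untested sketch of deriving the color space from the buffer's own attachments instead:

CFDictionaryRef attachments = CVBufferGetAttachments(imageBuffer, kCVAttachmentMode_ShouldPropagate);
// Build a CGColorSpace from whatever color information the camera attached
CGColorSpaceRef colorSpace = CVImageBufferCreateColorSpaceFromAttachments(attachments);
if (!colorSpace) {
    // Fall back to sRGB if the buffer carries no color space attachment
    colorSpace = CGColorSpaceCreateWithName(kCGColorSpaceSRGB);
}
// ...pass colorSpace to CGBitmapContextCreate as before, then CGColorSpaceRelease() it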
  • I don't see anything sticking out in a quick look through your code... but one reason AVCapture images can come out dark is that the camera takes a bit of time to automatically adapt focus, exposure, etc. Are you perhaps calling your `captureImage` method right after your `settingPreview` method that starts running the capture session? – rickster Dec 01 '16 at 18:00
  • The code is copied largely from Apple's examples. **settingPreview** is called when the program starts. The point is that the saved image is slightly darker; the difference from the live video is small, as if the brightness were decreased slightly. I think the problem occurs in the conversion from **CMSampleBufferRef** to **NSImage** – Mex Dec 01 '16 at 18:12
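
If the exposure-adaptation hypothesis from the comments is right, one crude way to test it is to delay the first still capture until the camera has had time to converge. A minimal sketch using GCD; the 2-second delay is an arbitrary guess, not a measured value:

// Give auto-exposure time to settle after startRunning before grabbing a still.
// The 2.0-second delay is a guess; tune it for the actual camera.
dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(2.0 * NSEC_PER_SEC)),
               dispatch_get_main_queue(), ^{
    [self captureImage];
});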

2 Answers


Solution found

The problem was in imageFromSampleBuffer. Using the code below instead, the picture is perfect:

     // Continue as appropriate.
     //IMG = [self imageFromSampleBuffer:imageSampleBuffer];

     CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(imageSampleBuffer);

     if (imageBuffer) {
         CVBufferRetain(imageBuffer);

         NSCIImageRep *imageRep = [NSCIImageRep imageRepWithCIImage:[CIImage imageWithCVImageBuffer:imageBuffer]];

         IMG = [[NSImage alloc] initWithSize:[imageRep size]];
         [IMG addRepresentation:imageRep];

         CVBufferRelease(imageBuffer);
     }

Code found in this answer
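
For reference, the same fix can be folded back into the original method as a drop-in replacement; a minimal sketch, untested, reusing the question's imageFromSampleBuffer: signature:

// Convert via Core Image and let NSCIImageRep handle pixel format and color space.
- (NSImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (!imageBuffer) {
        return nil;
    }
    CVBufferRetain(imageBuffer);
    NSCIImageRep *imageRep = [NSCIImageRep imageRepWithCIImage:[CIImage imageWithCVImageBuffer:imageBuffer]];
    NSImage *image = [[NSImage alloc] initWithSize:[imageRep size]];
    [image addRepresentation:imageRep];
    CVBufferRelease(imageBuffer);
    return image;
}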

– Mex

In my case, I still needed to call captureStillImageAsynchronouslyFromConnection: multiple times to force the built-in camera to expose properly:

int primeCount = 8;  // YMMV
for (int i = 0; i < primeCount; i++) {
    // Fire-and-forget captures whose only purpose is to let auto-exposure settle
    [imageOutput captureStillImageAsynchronouslyFromConnection:videoConnection
                                             completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error) {}];
}

[imageOutput captureStillImageAsynchronouslyFromConnection:videoConnection
                                         completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(imageSampleBuffer);
    if (imageBuffer) {
        CVBufferRetain(imageBuffer);

        NSCIImageRep *imageRep = [NSCIImageRep imageRepWithCIImage:[CIImage imageWithCVImageBuffer:imageBuffer]];
        IMG = [[NSImage alloc] initWithSize:[imageRep size]];
        [IMG addRepresentation:imageRep];

        // Balance the retain above (the original answer leaked the buffer here)
        CVBufferRelease(imageBuffer);
    }
}];
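
Since captureStillImageAsynchronouslyFromConnection: gives no explicit ordering guarantee between requests, a possible refinement is to wait for the priming captures to finish before taking the real shot. A sketch, untested, assuming the imageOutput, videoConnection, and captureImage from the question:

dispatch_group_t primeGroup = dispatch_group_create();
for (int i = 0; i < primeCount; i++) {
    dispatch_group_enter(primeGroup);
    [imageOutput captureStillImageAsynchronouslyFromConnection:videoConnection
                                             completionHandler:^(CMSampleBufferRef buf, NSError *err) {
        // Discard the priming frame; we only care that the request completed
        dispatch_group_leave(primeGroup);
    }];
}
dispatch_group_notify(primeGroup, dispatch_get_main_queue(), ^{
    // All priming frames have returned; exposure should have converged by now
    [self captureImage];
});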
– Carlito