0

I'm processing images (using AVFoundation and OpenCV on iOS) and I want to simply display contents of CMSampleBufferRef (or IplImage) to screen.

Simply: I just want to display (like with OpenCV's cvShowImage()) non-converted image to see if I'm not dealing with corrupted or somehow deformed image.

user961912
  • 157
  • 1
  • 2
  • 11

1 Answer

0

Sadly not. Different bitmap representation.

Perhaps you want a category? I use something along the lines of this:

 // NSImage+OpenCV.h
 // Category bridging OpenCV cv::Mat and AppKit NSImage.
 @interface NSImage (OpenCV)
 /// Convenience factory; wraps -initWithCVMat:.
 + (NSImage*)imageWithCVMat:(const cv::Mat&)cvMat;
 /// Initializes the receiver from the mat's 8-bit pixel data
 /// (the bytes are copied into an NSData — see the .m).
 - (id)initWithCVMat:(const cv::Mat&)cvMat;
 /// Renders the receiver into a newly allocated 4-channel 8-bit mat.
 - (cv::Ptr<cv::Mat>)cvMat;
 @end

 // NSImage+OpenCV.m
 //
 // Bridges cv::Mat <-> NSImage. Assumes the mat holds tightly packed
 // 8-bit channels (gray, RGB, or RGBX in that channel order).
 using namespace cv;
 @implementation NSImage (OpenCV)

 /// Wraps a copy of the mat's pixel data (1, 3, or 4 channels of 8-bit
 /// data) in an NSImage. The bytes are copied, so the caller may release
 /// or mutate the mat afterwards.
 - (id)initWithCVMat:(const cv::Mat&)cvMat {
      // Copy the pixels so the CGImage cannot dangle if cvMat goes away.
      NSData *data = [NSData dataWithBytes:cvMat.data
                                    length:cvMat.total() * cvMat.elemSize()];
      CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);

      // Pick a colour space / alpha layout that matches the channel count.
      // (Hard-coding DeviceRGB + kCGImageAlphaNone, as the original did,
      // makes CGImageCreate fail for 1- and 4-channel mats.)
      size_t channels = cvMat.elemSize();
      CGColorSpaceRef colourSpace = (channels == 1)
          ? CGColorSpaceCreateDeviceGray()
          : CGColorSpaceCreateDeviceRGB();
      CGBitmapInfo bitmapInfo = (channels == 4)
          ? (kCGImageAlphaNoneSkipLast | kCGBitmapByteOrderDefault)
          : (kCGImageAlphaNone | kCGBitmapByteOrderDefault);

      CGImageRef imageRef = CGImageCreate(cvMat.cols,
                                          cvMat.rows,
                                          8,              // bits per component
                                          8 * channels,   // bits per pixel
                                          cvMat.step[0],  // bytes per row
                                          colourSpace,
                                          bitmapInfo,
                                          provider,
                                          NULL,           // no decode array
                                          false,          // no interpolation
                                          kCGRenderingIntentDefault);

      // Initialize the receiver properly instead of abandoning the
      // alloc'd self and returning a separately created NSImage.
      self = [self initWithCGImage:imageRef size:NSMakeSize(cvMat.cols, cvMat.rows)];

      CGColorSpaceRelease(colourSpace);
      CGDataProviderRelease(provider);
      CGImageRelease(imageRef);

      return self;
 }

 /// Convenience factory; see -initWithCVMat:.
 + (NSImage*)imageWithCVMat:(const cv::Mat&)cvMat {
     return [[NSImage alloc] initWithCVMat:cvMat];
 }

 /// Renders the receiver into a freshly allocated 4-channel (RGBX) mat.
 /// Returns an empty mat if the image data cannot be decoded.
 - (cv::Ptr<cv::Mat>)cvMat {
      CGImageSourceRef source =
          CGImageSourceCreateWithData((__bridge CFDataRef)[self TIFFRepresentation], NULL);
      if (source == NULL) {
          return new cv::Mat();   // undecodable image -> empty mat
      }
      CGImageRef imageRef = CGImageSourceCreateImageAtIndex(source, 0, NULL);
      CFRelease(source);          // Create rule: was leaked in the original
      if (imageRef == NULL) {
          return new cv::Mat();
      }

      cv::Ptr<cv::Mat> cvMat = new cv::Mat(self.size.height, self.size.width, CV_8UC4);

      // Use an explicit RGB colour space: the context is 4-component, so
      // reusing the source image's (possibly grayscale) colour space can
      // make CGBitmapContextCreate fail and return NULL.
      CGColorSpaceRef colourSpace = CGColorSpaceCreateDeviceRGB();
      CGContextRef contextRef = CGBitmapContextCreate(cvMat->data,
                                                      cvMat->cols,
                                                      cvMat->rows,
                                                      8,
                                                      cvMat->step[0],
                                                      colourSpace,
                                                      kCGImageAlphaNoneSkipLast |
                                                      kCGBitmapByteOrderDefault);

      CGContextDrawImage(contextRef,
                         CGRectMake(0, 0, cvMat->cols, cvMat->rows),
                         imageRef);

      CGContextRelease(contextRef);
      CGColorSpaceRelease(colourSpace);
      CGImageRelease(imageRef);

      return cvMat;
 }

 @end
Matt Melton
  • 2,493
  • 19
  • 25
  • 1
    You may need to change over the references to NSImage to UIImage to make this work with iOS, but I think the rest of the Core Graphics code there should be pretty close to what you'd need for a UIImage. – Brad Larson Apr 25 '12 at 16:52
  • Thank you for the replies. I already know about an iOS alternative (`UIImage+OpenCV`), but something straightforward like `cvShowImage()` probably doesn't exist... – user961912 Apr 25 '12 at 18:39