
I am trying to create an HDR image on iOS from 7 images taken with different exposure values. I used the OpenCV cocoapod and followed this GitHub repository to implement the HDR processing. The exposure fusion (Mertens) output works and looks decent, but the HDR (Debevec) version produces a bad output: it looks roughly like a negative of what it should be. See below:

[HDR output image: the result looks like a negative of the scene]

I have tried producing both 8-bit and 32-bit versions of this image, since that is what other questions suggested.
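
To be concrete, this is the kind of bit-depth conversion I mean (a simplified sketch, not my exact code; the helper names and the assumption that the float data sits in [0, 1] are mine):

#include <opencv2/core.hpp>

// Sketch: converting between 32-bit float and 8-bit Mats.
// Assumes linearHdr is CV_32FC3 with values roughly in [0, 1];
// anything outside that range is clipped by the saturating cast.
cv::Mat to8Bit(const cv::Mat& linearHdr)
{
    cv::Mat out8;
    linearHdr.convertTo(out8, CV_8U, 255.0);    // [0,1] floats -> [0,255] bytes
    return out8;
}

cv::Mat to32Bit(const cv::Mat& img8)
{
    cv::Mat out32;
    img8.convertTo(out32, CV_32F, 1.0 / 255.0); // [0,255] bytes -> [0,1] floats
    return out32;
}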

I also included my HDR.cpp file that does the merging to HDR:

#include <opencv2/opencv.hpp>

using namespace cv;
using namespace std;

cv::Mat mergeToHDR (vector<Mat>& images, vector<float>& times)
{
    // estimate the camera response curve from the bracketed exposures
    Mat response;
    Ptr<CalibrateDebevec> calibrate = createCalibrateDebevec();
    calibrate->process(images, response, times);

    // alternative calibration:
    // Ptr<CalibrateRobertson> calibrate = createCalibrateRobertson();
    // calibrate->process(images, response, times);

    // create HDR
    Mat hdr;
    Ptr<MergeDebevec> merge_debevec = createMergeDebevec();
    merge_debevec->process(images, hdr, times, response);

    // create fusion
    // Mat fusion;
    // Ptr<MergeMertens> merge_mertens = createMergeMertens();
    // merge_mertens->process(images, fusion);

    // fusion
    // Mat fusion8bit;
    // fusion = fusion * 255;
    // fusion.convertTo(fusion8bit, CV_8U);
    // return fusion8bit;

    // hdr
    Mat hdr8bit;
    hdr = hdr * 255;
    hdr.convertTo(hdr8bit, CV_8U);
    return hdr8bit;
}
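
For reference, the OpenCV HDR tutorial runs a tonemapper on the Debevec result before converting it to 8-bit, because MergeDebevec outputs linear radiance that is not confined to [0, 1]. I have not verified that this is what my output is missing; a minimal sketch of that step (the gamma of 2.2 is just an assumed value) would be:

#include <opencv2/photo.hpp>

// Sketch only: compress the linear HDR radiance into [0, 1] with a
// tonemapper before converting to 8-bit. hdr is the CV_32FC3 output
// of MergeDebevec.
cv::Mat tonemapToLDR(const cv::Mat& hdr)
{
    cv::Mat ldr;
    cv::Ptr<cv::Tonemap> tonemap = cv::createTonemap(2.2f);
    tonemap->process(hdr, ldr);           // ldr is CV_32FC3 in [0, 1]

    cv::Mat ldr8bit;
    ldr.convertTo(ldr8bit, CV_8U, 255.0);
    return ldr8bit;
}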

Let me know if you need any more of my code.

Here is my code to produce the Mat images:

-(cv::Mat)CVMat
{
    CGColorSpaceRef colorSpace = CGImageGetColorSpace(self.CGImage);
    CGFloat cols = self.size.width;
    CGFloat rows = self.size.height;

    cv::Mat cvMat(rows, cols, CV_8UC4); // 8 bits per component, 4 channels

    CGContextRef contextRef = CGBitmapContextCreate(cvMat.data,
                                                    cols,
                                                    rows,
                                                    8,
                                                    cvMat.step[0],
                                                    colorSpace,
                                                    kCGImageAlphaNoneSkipLast |
                                                    kCGBitmapByteOrderDefault);

    CGContextDrawImage(contextRef, CGRectMake(0, 0, cols, rows), self.CGImage);
    CGContextRelease(contextRef);

    return cvMat;
}

- (cv::Mat)CVMat3
{
    cv::Mat result = [self CVMat];
    cv::cvtColor(result, result, cv::COLOR_RGBA2RGB);
    return result;
}  
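
Regarding the RGB/BGR point raised in the comments further down: I have not confirmed that channel order is the issue, but if the OpenCV side expected its usual BGR layout, the conversion would look like this instead (sketch only, the helper name is made up):

#include <opencv2/imgproc.hpp>

// Sketch: convert the RGBA Mat to OpenCV's conventional BGR order.
cv::Mat rgbaToBGR(const cv::Mat& rgba)
{
    cv::Mat bgr;
    cv::cvtColor(rgba, bgr, cv::COLOR_RGBA2BGR);
    return bgr;
}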

-(cv::Mat)CVGrayscaleMat
{
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceGray();
    CGFloat cols = self.size.width;
    CGFloat rows = self.size.height;

    cv::Mat cvMat(rows, cols, CV_8UC1); // 8 bits per component, 1 channel

    CGContextRef contextRef = CGBitmapContextCreate(cvMat.data,                 // Pointer to data
                                                    cols,                       // Width of bitmap
                                                    rows,                       // Height of bitmap
                                                    8,                          // Bits per component
                                                    cvMat.step[0],              // Bytes per row
                                                    colorSpace,                 // Colorspace
                                                    kCGImageAlphaNoneSkipLast |
                                                    kCGBitmapByteOrderDefault); // Bitmap info flags

    CGContextDrawImage(contextRef, CGRectMake(0, 0, cols, rows), self.CGImage);
    CGContextRelease(contextRef);
    CGColorSpaceRelease(colorSpace);

    return cvMat;
}

+ (UIImage *)imageWithCVMat:(const cv::Mat&)cvMat
{
    return [[UIImage alloc] initWithCVMat:cvMat];
}

- (id)initWithCVMat:(const cv::Mat&)cvMat
{
    NSData *data = [NSData dataWithBytes:cvMat.data length:cvMat.elemSize() * cvMat.total()];
    CGColorSpaceRef colorSpace;

    size_t elemSize = cvMat.elemSize();
    size_t elemSize1 = cvMat.elemSize1();

    size_t channelCount = elemSize/elemSize1;
    size_t bitsPerChannel = 8 * elemSize1;
    size_t bitsPerPixel = bitsPerChannel * channelCount;

    if (channelCount == 1) {
        colorSpace = CGColorSpaceCreateDeviceGray();
    } else {
        colorSpace = CGColorSpaceCreateDeviceRGB();
    }

    // Tell CGImageRef to use different bitmap info when handed 32-bit float data
    uint32_t bitmapInfo = kCGImageAlphaNone | kCGBitmapByteOrderDefault;

    if (bitsPerChannel == 32 ){
        bitmapInfo = kCGImageAlphaNoneSkipLast | kCGBitmapFloatComponents | kCGBitmapByteOrder32Little;
    }

    CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);

    // Creating CGImage from cv::Mat
    CGImageRef imageRef = CGImageCreate(cvMat.cols,                                 //width
                                        cvMat.rows,                                 //height
                                        bitsPerChannel,                             //bits per component
                                        bitsPerPixel,                               //bits per pixel
                                        cvMat.step[0],                              //bytesPerRow
                                        colorSpace,                                 //colorspace
                                        bitmapInfo,                                 // bitmap info
                                        provider,                                   //CGDataProviderRef
                                        NULL,                                       //decode
                                        false,                                      //should interpolate
                                        kCGRenderingIntentDefault                   //intent
                                        );                     

    // Getting UIImage from CGImage
    self = [self initWithCGImage:imageRef];
    CGImageRelease(imageRef);
    CGDataProviderRelease(provider);
    CGColorSpaceRelease(colorSpace);

    return self;
}
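
For reference, this is how the channel count and bits per channel come out for the two Mat types that flow through this method (a small standalone sketch, just to show what the branches above see; it is not part of the app code):

#include <opencv2/core.hpp>
#include <cstdio>

// Sketch: what elemSize()/elemSize1() report for the Mat types used here,
// which is what drives the colorspace and bitmapInfo choices above.
int main()
{
    cv::Mat rgba(10, 10, CV_8UC4);  // what CVMat produces
    cv::Mat hdr(10, 10, CV_32FC3);  // what MergeDebevec produces

    for (const cv::Mat& m : {rgba, hdr}) {
        size_t channels = m.elemSize() / m.elemSize1(); // 4, then 3
        size_t bitsPerChannel = 8 * m.elemSize1();      // 8, then 32
        std::printf("channels = %zu, bitsPerChannel = %zu\n",
                    channels, bitsPerChannel);
    }
    return 0;
}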
  • It looks like there is an issue with your colorspace. iOS and OpenCV do a lot of things differently, so it is easy to mix up something like RGB (iOS) to BGR (OpenCV). Maybe look into that? – CodeBender Nov 20 '17 at 19:39
  • I have looked into that and the way I do it is depending on the number of channels, I either use CGColorSpaceCreateDeviceGray() or CGColorSpaceCreateDeviceRGB(). I couldn't find anything in my code that could mess up BGR and RGB. Also, if there was an issue with colorspace, wouldn't the fusion image look bad too? Because the colors on that one look fine. – mateishungary Nov 21 '17 at 16:29
  • We'd really need to see your code to produce the Mat images - depending on where the images are coming from the colorspace may be BGR or RGB in the source. – jpetrichsr Nov 21 '17 at 18:38
  • @jpetrichsr I just added my code that produces the Mat images. Hope that helps. – mateishungary Nov 21 '17 at 19:40
