2

I am a freshman for IOS APP development, and I met a problem: " how to use vImage to convert an ARGB image to Gray image ". Actually, I have realized this conversion processing by other methods. However, I have found "vImage" could complete this work through "Accelerate Framework Reference". I have tried this method, but there is no any conversion result. Here, I attach the code, can you help me?

-(UIImage *)rgbTograyByvImage
{
    const size_t width = self.size.width * self.scale;
    const size_t height = self.size.height * self.scale;
    const size_t bytesPerRow = width * 4;
    CGColorSpaceRef space = CGColorSpaceCreateDeviceRGB();
    CGContextRef bmContext= CGBitmapContextCreate(NULL, width, height, 8, bytesPerRow, space, kCGBitmapByteOrderDefault | kCGImageAlphaPremultipliedFirst);
    CGColorSpaceRelease(space);
    if (!bmContext) {
        return nil;
    }
    CGContextDrawImage(bmContext, (CGRect){.origin.x = 0.0f, .origin.y = 0.0f, .size.width = width, .size.height = height}, self.CGImage);
    UInt32 *data = (UInt32 *)CGBitmapContextGetData(bmContext);
    if (!data) {
        CGContextRelease(bmContext);
        return nil;
    }
    int Aphal = 0;
    int Red = 1;
    int Green = 2;
    int Blue = 3;
    UInt8 *aphalSpace = malloc(sizeof(UInt8) * width * height);
    UInt8 *redSpace = malloc(sizeof(UInt8) * width * height);
    UInt8 *greenSpace = malloc(sizeof(UInt8) * width * height);
    UInt8 *blueSpace = malloc(sizeof(UInt8) * width * height);

    for (int y=0; y<height; y++) {
        for (int x=0; x<width; x++) {
            UInt8 *argbPixel = (UInt8 *) (&data[y*width+x]); 
            aphalSpace[y*width+x] = argbPixel[Aphal];
            redSpace[y*width+x] = argbPixel[Red];
            greenSpace[y*width+x] = argbPixel[Green];
            blueSpace[y*width+x] = argbPixel[Blue];
        }
    }

    vImage_Buffer argbImageBuffer = {(UInt8*)data, height, width, bytesPerRow};
    vImage_Buffer aImageBuffer = {aphalSpace, height, width, width};
    vImage_Buffer rImageBuffer = {redSpace, height, width, width};
    vImage_Buffer gImageBuffer = {greenSpace, height, width, width};
    vImage_Buffer bImageBuffer = {blueSpace, height, width, width};
    vImage_Error error;
    error = vImageConvert_ARGB8888toPlanar8(&argbImageBuffer, &aImageBuffer, &rImageBuffer, &gImageBuffer, &bImageBuffer, kvImageNoFlags);
    if (error != kvImageNoError) {
        NSLog(@"%s, vImage error %zd", __PRETTY_FUNCTION__, error);
    }
    CGImageRef grayImage = CGBitmapContextCreateImage(bmContext);
    UIImage *gray = [UIImage imageWithCGImage:grayImage];

    CGContextRelease(bmContext);
    CGImageRelease(grayImage);
    free(aphalSpace);
    free(redSpace);
    free(greenSpace);
    free(blueSpace);

    return gray;
}
Rucy Cheng
  • 21
  • 2
  • I don't think you meant kCGImageAlphaPremultipliedFirst there, a grayscale image needs no alpha channel. I think you want to use (kCGBitmapByteOrder32Host | kCGImageAlphaNoneSkipFirst) – MoDJ Dec 11 '18 at 22:03

1 Answers1

0

You want vImageMatrixMultiply_ARGB8888ToPlanar8, new for iOS 9.