How do I get the average color of the non-transparent parts of an image?
I know that you can get the average color of an image using the block of code below, but it also factors the transparent parts into the calculation.
import UIKit

extension UIImage {
    /// Average color of the whole image. Transparent pixels are counted too,
    /// which is the problem described above.
    var averageColor: UIColor? {
        guard let inputImage = CIImage(image: self) else { return nil }

        // Average over the full extent of the image.
        let extentVector = CIVector(
            x: inputImage.extent.origin.x,
            y: inputImage.extent.origin.y,
            z: inputImage.extent.size.width,
            w: inputImage.extent.size.height
        )

        guard let filter = CIFilter(
            name: "CIAreaAverage",
            withInputParameters: [
                kCIInputImageKey: inputImage,
                kCIInputExtentKey: extentVector
            ]) else { return nil }
        guard let outputImage = filter.outputImage else { return nil }

        // Render the filter's 1x1 output into a 4-byte RGBA buffer.
        var bitmap = [UInt8](repeating: 0, count: 4)
        let context = CIContext(options: [kCIContextWorkingColorSpace: kCFNull])
        context.render(
            outputImage,
            toBitmap: &bitmap,
            rowBytes: 4,
            bounds: CGRect(x: 0, y: 0, width: 1, height: 1),
            format: kCIFormatRGBA8,
            colorSpace: nil
        )

        return UIColor(
            red: CGFloat(bitmap[0]) / 255,
            green: CGFloat(bitmap[1]) / 255,
            blue: CGFloat(bitmap[2]) / 255,
            alpha: CGFloat(bitmap[3]) / 255
        )
    }
}
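
One adjustment I've been thinking about, though I'm not sure it's correct: Core Image appears to average premultiplied-alpha pixels, so each pixel's red, green, and blue are already scaled by its alpha before CIAreaAverage runs. If that holds, dividing the averaged channels by the averaged alpha should cancel out the fully transparent pixels and leave the average of just the visible ones. A rough sketch of that idea (averageVisibleColor is just a name I made up):

extension UIImage {
    /// Sketch: average color weighted by alpha, so fully transparent pixels
    /// contribute nothing. Assumes CIAreaAverage works on premultiplied alpha.
    var averageVisibleColor: UIColor? {
        guard let inputImage = CIImage(image: self) else { return nil }
        let extentVector = CIVector(
            x: inputImage.extent.origin.x,
            y: inputImage.extent.origin.y,
            z: inputImage.extent.size.width,
            w: inputImage.extent.size.height
        )
        guard let filter = CIFilter(
            name: "CIAreaAverage",
            withInputParameters: [
                kCIInputImageKey: inputImage,
                kCIInputExtentKey: extentVector
            ]) else { return nil }
        guard let outputImage = filter.outputImage else { return nil }

        var bitmap = [UInt8](repeating: 0, count: 4)
        let context = CIContext(options: [kCIContextWorkingColorSpace: kCFNull])
        context.render(
            outputImage,
            toBitmap: &bitmap,
            rowBytes: 4,
            bounds: CGRect(x: 0, y: 0, width: 1, height: 1),
            format: kCIFormatRGBA8,
            colorSpace: nil
        )

        // Average alpha over the whole image; zero means nothing is visible.
        let alpha = CGFloat(bitmap[3]) / 255
        guard alpha > 0 else { return nil }

        // Un-premultiply: each channel was averaged as (alpha * channel),
        // so dividing by the average alpha removes the transparent pixels' weight.
        return UIColor(
            red: min(CGFloat(bitmap[0]) / 255 / alpha, 1),
            green: min(CGFloat(bitmap[1]) / 255 / alpha, 1),
            blue: min(CGFloat(bitmap[2]) / 255 / alpha, 1),
            alpha: 1
        )
    }
}

Is that a sound way to do it, or is there a proper way to make CIAreaAverage (or another filter) ignore transparent pixels entirely?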