0

I have a custom UITableViewCell class that I use in one of my UITableViewController classes. I pasted its code below. Sorry if it's a bit lengthy but I believe every bit of code is relevant to the question. In the custom cell class, I take an image, mask it with a UIBezierPath to make it circular, create another image that adds a border and an inner shadow (also using a UIBezierPath) and then combine the two images.

Everything works fine, except that the first image (the actual image, which is on the bottom) overflows by one pixel all around the second image (the inner shadow, which is on top). I tried to fiddle with the values of the CGRect structs but that didn't do the trick... Here is an image illustrating what I am saying:

Picture overflows

And here is the code of the custom UITableViewCell:

#import "AZTrackedUserMenuCell.h"
#import "UIImageView+AFNetworking.h"

@interface AZTrackedUserMenuCell ()

// Spinner shown while the avatar image is being downloaded/rendered;
// stopped (and hidden for the no-user case) in -displayImage.
@property (nonatomic, strong) UIActivityIndicatorView *ai;

@end

@implementation AZTrackedUserMenuCell

/// Lays out the cell's subviews and performs one-time setup of the
/// activity indicator, avatar image, and username label.
///
/// NOTE: -layoutSubviews is called many times over a cell's lifetime.
/// The original implementation allocated a fresh activity indicator,
/// re-added it as a subview, and re-invoked -displayImage (issuing a new
/// network request and stacking duplicate image views) on every pass.
/// One-time work is now guarded so it runs only once per cell instance.
- (void)layoutSubviews
{
    [super layoutSubviews];

    if (self.ai == nil) {
        // Create and position the spinner exactly once.
        self.ai = [[UIActivityIndicatorView alloc] initWithActivityIndicatorStyle:UIActivityIndicatorViewStyleGray];

        CGFloat aiWidth = self.ai.frame.size.width;
        CGFloat aiHeight = self.ai.frame.size.height;
        self.ai.frame = CGRectMake(42.0, 38.0, aiWidth, aiHeight);

        [self addSubview:self.ai];
        [self.ai startAnimating];

        // Kick off the avatar download/render once; repeating this on every
        // layout pass would duplicate requests and image views.
        [self displayImage];
    }

    // The label is looked up by tag so it is only created the first time.
    UILabel *usernameLabel = (UILabel *)[self viewWithTag:2];
    if (usernameLabel == nil) {
        usernameLabel = [[UILabel alloc] initWithFrame:CGRectMake(95.0, 15.0, 140.0, 21)];
        usernameLabel.tag = 2;
        [self addSubview:usernameLabel];
    }

    usernameLabel.font = [UIFont boldSystemFontOfSize:14.0];
    usernameLabel.textColor = [UIColor colorWithWhite:0.9 alpha:1.000];
    usernameLabel.backgroundColor = [UIColor colorWithWhite:0.0 alpha:0.0];

    // self.tag doubles as a "has user" flag: -1 = no tracked user, 0 = tracked.
    if (self.user == nil) {
        self.tag = -1;
        usernameLabel.text = @"No user tracked";
    } else {
        self.tag = 0;
        usernameLabel.text = self.user[@"display_name"];
    }

    self.backgroundColor = [UIColor clearColor];
}

/// Creates the avatar image view and fills it with either the default
/// image (no user) or the user's downloaded profile image, post-processed
/// through -renderImage:inImageView:.
///
/// Fixes over the original:
///  - The image view is added to the cell immediately, so the view
///    hierarchy holds a strong reference. Previously nothing retained it
///    until the AFNetworking success block ran, so the weakly-captured
///    view could already be deallocated (and the block a silent no-op).
///  - `self` is captured weakly in the blocks to avoid a retain cycle
///    while the request is in flight.
///  - The failure block stops the spinner instead of leaving it
///    animating forever.
- (void)displayImage
{
    UIImageView *avatarImageView = [[UIImageView alloc] initWithFrame:CGRectMake(15.0, 10.0, 84.0, 84.0)];
    // Attach now: retains the view and shows the placeholder while loading.
    [self addSubview:avatarImageView];

    if (self.user == nil) {
        avatarImageView.image = [self renderImage:[UIImage imageNamed:@"defaultUserImage"] inImageView:avatarImageView];
        [self.ai stopAnimating];
        self.ai.hidden = YES;
    } else {
        NSURLRequest *avatarURLRequest = [NSURLRequest requestWithURL:[NSURL URLWithString:self.user[@"profile_image"]]];
        __weak typeof(self) weakSelf = self;
        __weak typeof(avatarImageView) weakAvatarImageView = avatarImageView;
        [avatarImageView setImageWithURLRequest:avatarURLRequest
                               placeholderImage:[UIImage imageNamed:@"no-image"]
                                        success:^(NSURLRequest *request, NSHTTPURLResponse *response, UIImage *image) {
                                            __strong typeof(weakSelf) strongSelf = weakSelf;
                                            __strong typeof(weakAvatarImageView) strongAvatarImageView = weakAvatarImageView;
                                            if (strongSelf == nil || strongAvatarImageView == nil) {
                                                return; // cell or image view went away mid-flight
                                            }
                                            strongAvatarImageView.image = [strongSelf renderImage:image inImageView:strongAvatarImageView];
                                            [strongSelf.ai stopAnimating];
                                        } failure:^(NSURLRequest *request, NSHTTPURLResponse *response, NSError *error) {
                                            // Don't leave the spinner running on a failed download;
                                            // the placeholder image remains visible.
                                            [weakSelf.ai stopAnimating];
                                        }];
    }
}

/// Renders `image` as a circular avatar sized to `imageView`'s bounds
/// (84x84 here) in three offscreen passes:
///   1. clip the source image to a 76x76 oval ("maskedImage"),
///   2. render a border + inner-shadow overlay oval of the same size ("ss"),
///   3. composite the two, assign the result to `imageView`, and return it.
///
/// NOTE(review): the mask oval (pass 1) and the overlay oval (pass 2) use
/// the *same* 76x76 rect, so the antialiased edge of the clipped photo can
/// peek out from under the overlay — presumably the 1px "overflow" the
/// question describes; insetting the mask rect slightly should hide it
/// (confirm visually).
- (UIImage *)renderImage:(UIImage *)image inImageView:(UIImageView *)imageView
{
    UIImage *maskedImage = nil;

    // ---- Pass 1: oval-clip the source photo. ----
    // opaque = NO, scale = 0.0 (use the device's screen scale).
    UIGraphicsBeginImageContextWithOptions(imageView.bounds.size, NO, 0.0);
    CGContextRef maskContext = UIGraphicsGetCurrentContext();

    UIBezierPath* maskOvalPath = [UIBezierPath bezierPathWithOvalInRect: CGRectMake(0.0, 0.0, 76.0, 76.0)];
    CGContextSaveGState(maskContext);
    {
        [maskOvalPath addClip];

        // Only the part of the photo inside the oval survives.
        [image drawInRect:CGRectMake(0.0, 0.0, 76.0, 76.0)];

        maskedImage = UIGraphicsGetImageFromCurrentImageContext();
    }

    CGContextRestoreGState(maskContext);

    UIGraphicsEndImageContext();

    // ---- Pass 2: border + inner shadow overlay in its own context. ----
    UIGraphicsBeginImageContextWithOptions(imageView.bounds.size, NO, 0.0);

    CGContextRef ssContext = UIGraphicsGetCurrentContext();

    UIColor* fillColor = [UIColor colorWithRed: 0 green: 0 blue: 0 alpha: 0];       // fully transparent fill
    UIColor* strokeColor = [UIColor colorWithRed: 0 green: 0 blue: 0 alpha: 0.529]; // inner-shadow color
    UIColor* strokeColor2 = [UIColor colorWithRed: 0 green: 0 blue: 0 alpha: 1];    // border color

    UIColor* shadow = strokeColor;
    CGSize shadowOffset = CGSizeMake(0.1, -0.1);
    CGFloat shadowBlurRadius = 25.0;

    // Stroke and shadow oval path
    UIBezierPath* ssOvalPath = [UIBezierPath bezierPathWithOvalInRect: CGRectMake(0.0, 0.0, 76.0, 76.0)];
    [fillColor setFill];
    [ssOvalPath fill];

    // Grow the oval's bounds enough to cover the blur radius and offset, so
    // the "negative" shape built below fully surrounds the oval.
    CGRect ovalBorderRect = CGRectInset(ssOvalPath.bounds, -shadowBlurRadius, -shadowBlurRadius);
    ovalBorderRect = CGRectOffset(ovalBorderRect, -shadowOffset.width, -shadowOffset.height);
    ovalBorderRect = CGRectInset(CGRectUnion(ovalBorderRect, ssOvalPath.bounds), -1, -1);

    // Even-odd path = enclosing rect minus the oval. Filling it while clipped
    // to the oval casts only its *shadow* inward — the inner-shadow trick.
    UIBezierPath* ovalNegativePath = [UIBezierPath bezierPathWithRect: ovalBorderRect];
    [ovalNegativePath appendPath:ssOvalPath];
    ovalNegativePath.usesEvenOddFillRule = YES;

    // Stroke and shadow image
    UIImage *ss = nil;

    CGContextSaveGState(ssContext);
    {
        // Shift the shadow right by the negative shape's width, then translate
        // the shape left by the same amount: the gray fill lands outside the
        // context while its shadow falls inside the clipped oval.
        CGFloat xOffset = shadowOffset.width + round(ovalBorderRect.size.width);
        CGFloat yOffset = shadowOffset.height;
        CGContextSetShadowWithColor(ssContext,
                                    CGSizeMake(xOffset + copysign(0.1, xOffset), yOffset + copysign(0.1, yOffset)),
                                    shadowBlurRadius,
                                    shadow.CGColor);

        [ssOvalPath addClip];
        CGAffineTransform transform = CGAffineTransformMakeTranslation(-round(ovalBorderRect.size.width), 0);
        [ovalNegativePath applyTransform: transform];
        [[UIColor grayColor] setFill];
        [ovalNegativePath fill];

        // 3pt border stroked (centered on the path) on top of the shadow.
        [strokeColor2 setStroke];
        ssOvalPath.lineWidth = 3;
        [ssOvalPath stroke];

        ss = UIGraphicsGetImageFromCurrentImageContext();
    }
    CGContextRestoreGState(ssContext);

    UIGraphicsEndImageContext();

    // ---- Pass 3: composite photo (bottom) + overlay (top). ----
    UIGraphicsBeginImageContextWithOptions(imageView.bounds.size, NO, 0.0);

    CGContextRef context = UIGraphicsGetCurrentContext();

    CGContextSaveGState(context);
    {
        [maskedImage drawInRect:CGRectMake(0.0, 0.0, maskedImage.size.width, maskedImage.size.height)];
        // Both intermediate images are context-sized (84x84 per the logs in
        // Edit 1), so this offset works out to (0, 0): the overlay sits
        // exactly on top of the photo.
        [ss drawInRect:CGRectMake(maskedImage.size.width - ss.size.width, maskedImage.size.height - ss.size.height, ss.size.width, ss.size.height)];

        // Side effect: the composite is also assigned to the image view.
        imageView.image = UIGraphicsGetImageFromCurrentImageContext();
    }
    CGContextRestoreGState(context);

    UIGraphicsEndImageContext();

    return imageView.image;
}

@end

How do I remove this overflow please?

Edit 1: I logged the sizes of all three images:

DDLogInfo(@"%f, %f", maskedImage.size.width, maskedImage.size.height);
DDLogInfo(@"%f, %f", ss.size.width, ss.size.height);
DDLogInfo(@"%f, %f", imageView.image.size.width, imageView.image.size.height);

And I got the following output:

2013-07-01 20:03:47:075 Stackify[25044:c07] 84.000000, 84.000000
2013-07-01 20:03:47:075 Stackify[25044:c07] 84.000000, 84.000000
2013-07-01 20:03:47:075 Stackify[25044:c07] 84.000000, 84.000000

Edit 2: Following the advice from @peter-hosey I refactored the renderImage:inImageView: method. I don't know if the code is any better but here it is: (Note that the result is the same)

/// Single-context rewrite of -renderImage:inImageView: (see Edit 2): the
/// border/inner-shadow overlay, the oval-masked photo, and the final
/// composite are all produced from the *same* graphics context by calling
/// UIGraphicsGetImageFromCurrentImageContext at successive stages, then
/// drawing those snapshots back into the context.
///
/// NOTE(review): re-drawing context snapshots into the same context is a
/// likely cause of the quality loss mentioned below — each snapshot is
/// rasterized and then resampled; verify against separate contexts.
- (UIImage *)renderImage:(UIImage *)image inImageView:(UIImageView *)imageView
{
    // One shared context at screen scale, sized to the image view (84x84).
    UIGraphicsBeginImageContextWithOptions(imageView.bounds.size, NO, 0.0);
    CGContextRef maskContext = UIGraphicsGetCurrentContext();

    UIColor* fillColor = [UIColor colorWithRed: 0 green: 0 blue: 0 alpha: 0];       // fully transparent fill
    UIColor* strokeColor = [UIColor colorWithRed: 0 green: 0 blue: 0 alpha: 0.529]; // inner-shadow color
    UIColor* strokeColor2 = [UIColor colorWithRed: 0 green: 0 blue: 0 alpha: 1];    // border color

    UIColor* shadow = strokeColor;
    CGSize shadowOffset = CGSizeMake(0.1, -0.1);
    CGFloat shadowBlurRadius = 25.0;

    UIBezierPath* ssOvalPath = [UIBezierPath bezierPathWithOvalInRect: CGRectMake(0.0, 0.0, 76.0, 76.0)];
    [fillColor setFill];
    [ssOvalPath fill];

    // Bounds grown to cover blur + offset so the "negative" shape fully
    // surrounds the oval (PaintCode-style inner shadow setup).
    CGRect ovalBorderRect = CGRectInset(ssOvalPath.bounds, -shadowBlurRadius, -shadowBlurRadius);
    ovalBorderRect = CGRectOffset(ovalBorderRect, -shadowOffset.width, -shadowOffset.height);
    ovalBorderRect = CGRectInset(CGRectUnion(ovalBorderRect, ssOvalPath.bounds), -1, -1);

    // Even-odd: enclosing rect minus the oval; filling it while clipped to
    // the oval leaves only its shadow inside — the inner shadow.
    UIBezierPath* ovalNegativePath = [UIBezierPath bezierPathWithRect: ovalBorderRect];
    [ovalNegativePath appendPath:ssOvalPath];
    ovalNegativePath.usesEvenOddFillRule = YES;

    UIImage *ss = nil;

    // Same rect as ssOvalPath — the photo mask and the overlay coincide.
    UIBezierPath* maskOvalPath = [UIBezierPath bezierPathWithOvalInRect: CGRectMake(0.0, 0.0, 76.0, 76.0)];
    CGContextSaveGState(maskContext);
    {
        // Push the shadow right by the negative shape's width and translate
        // the shape left by the same amount: only the shadow lands in-frame.
        CGFloat xOffset = shadowOffset.width + round(ovalBorderRect.size.width);
        CGFloat yOffset = shadowOffset.height;
        CGContextSetShadowWithColor(maskContext,
                                    CGSizeMake(xOffset + copysign(0.1, xOffset), yOffset + copysign(0.1, yOffset)),
                                    shadowBlurRadius,
                                    shadow.CGColor);

        [ssOvalPath addClip];
        CGAffineTransform transform = CGAffineTransformMakeTranslation(-round(ovalBorderRect.size.width), 0);
        [ovalNegativePath applyTransform: transform];
        [[UIColor grayColor] setFill];
        [ovalNegativePath fill];

        // 3pt border on top of the inner shadow.
        [strokeColor2 setStroke];
        ssOvalPath.lineWidth = 3;
        [ssOvalPath stroke];

        // Snapshot 1: overlay only.
        ss = UIGraphicsGetImageFromCurrentImageContext();

        // Still inside the same saved state: clip again and draw the photo.
        // NOTE(review): the shadow set above is still active here, so drawing
        // the image also casts it — presumably unintended; confirm.
        [maskOvalPath addClip];
        [image drawInRect:CGRectMake(0.0, 0.0, 76.0, 76.0)];

        // Snapshot 2: overlay + clipped photo.
        UIImage *maskedImage = UIGraphicsGetImageFromCurrentImageContext();

        // Re-draw both snapshots into the (still clipped) context, then take
        // the final snapshot and assign it to the image view.
        [maskedImage drawInRect:CGRectMake(0.0, 0.0, maskedImage.size.width, maskedImage.size.height)];
        [ss drawInRect:CGRectMake(maskedImage.size.width - ss.size.width, maskedImage.size.height - ss.size.height, ss.size.width, ss.size.height)];

        imageView.image = UIGraphicsGetImageFromCurrentImageContext();
    }

    CGContextRestoreGState(maskContext);

    UIGraphicsEndImageContext();

    return imageView.image;
}

It seems that rather than overflowing, the resulting image now loses quality, but I'm not sure...

Robert Audi
  • 8,019
  • 9
  • 45
  • 67
  • How come you reference the avatar image view weakly within the block, but `self` strongly? Plus, you have no other strong reference to the avatar image view outside of that method, so the image view will get deallocated when `displayImage` returns, which means that when (if) your success block gets called, `weakAvatarImageView` will always be `nil`. – Peter Hosey Jul 01 '13 at 17:18
  • Have you tried logging the values of `maskedImage.size`, `ss.size`, and the `size` of the image you're putting in the image view? What are their sizes (most especially `ss`'s)? – Peter Hosey Jul 01 '13 at 17:21
  • Finally, is there a reason you're building three images, instead of just one in which you draw `image` and then fill `maskOvalPath` over it? – Peter Hosey Jul 01 '13 at 17:24
  • 1. Because I didn't know better 2. Yes (width and height are 84..) 3. I tried and failed but now that you say it I might do it in one go. I'll try a few more things but I have very low hopes. By the way, thanks for helping! – Robert Audi Jul 01 '13 at 17:45
  • “width and height are 84..” Width and height of what? I can see that the avatar image view is created at such size, but the images should be 8 points smaller than that, according to the `renderImage::` code. What are all three images' sizes? – Peter Hosey Jul 01 '13 at 17:56

0 Answers0