UIImage+ColorAtPixel category
#import <UIKit/UIKit.h>
/*
A category on UIImage that enables you to query the color value of arbitrary
pixels of the image.
*/
@interface UIImage (ColorAtPixel)
- (UIColor *)colorAtPixel:(CGPoint)point;
@end
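A quick usage sketch (the asset name and coordinates are placeholders, not part of the category): import the header, load an image, and ask it for a pixel color.

#import "UIImage+ColorAtPixel.h"

UIImage *image = [UIImage imageNamed:@"arrowButton.png"];       // placeholder asset name
UIColor *color = [image colorAtPixel:CGPointMake(10.0f, 25.0f)];
if (color != nil) {
    // colorAtPixel: returns nil for points outside the image bounds
    CGFloat alpha = CGColorGetAlpha(color.CGColor);
    NSLog(@"Alpha at (10, 25): %.2f", alpha);
}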
#import <CoreGraphics/CoreGraphics.h>
#import "UIImage+ColorAtPixel.h"
@implementation UIImage (ColorAtPixel)
/*
Returns the color of the image pixel at point. Returns nil if point lies outside the image bounds.
If the point coordinates contain decimal parts, they will be truncated.
To get at the pixel data, this method must draw the image into a bitmap context.
For minimal memory usage and optimum performance, only the specific requested
pixel is drawn.
If you need to query pixel colors for the same image repeatedly (e.g., in a loop),
this approach is probably less efficient than drawing the entire image into memory
once and caching it (see the sketch after this listing).
*/
- (UIColor *)colorAtPixel:(CGPoint)point {
// Cancel if point is outside image coordinates
if (!CGRectContainsPoint(CGRectMake(0.0f, 0.0f, self.size.width, self.size.height), point)) {
return nil;
}
// Create a 1x1 pixel byte array and bitmap context to draw the pixel into.
// Reference: http://stackoverflow.com/questions/1042830/retrieving-a-pixel-alpha-value-for-a-uiimage
NSInteger pointX = trunc(point.x);
NSInteger pointY = trunc(point.y);
CGImageRef cgImage = self.CGImage;
NSUInteger width = CGImageGetWidth(cgImage);
NSUInteger height = CGImageGetHeight(cgImage);
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
int bytesPerPixel = 4;
int bytesPerRow = bytesPerPixel * 1;
NSUInteger bitsPerComponent = 8;
unsigned char pixelData[4] = { 0, 0, 0, 0 };
CGContextRef context = CGBitmapContextCreate(pixelData,
1,
1,
bitsPerComponent,
bytesPerRow,
colorSpace,
kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big);
CGColorSpaceRelease(colorSpace);
CGContextSetBlendMode(context, kCGBlendModeCopy);
// Draw the one pixel we are interested in onto the bitmap context.
// CGContextDrawImage uses Quartz's bottom-left origin, while point is given in
// UIKit's top-left coordinate space, so shift the image accordingly.
CGContextTranslateCTM(context, -pointX, pointY - (CGFloat)height);
CGContextDrawImage(context, CGRectMake(0.0f, 0.0f, (CGFloat)width, (CGFloat)height), cgImage);
CGContextRelease(context);
// Convert color values [0..255] to floats [0.0..1.0]
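// Note: because the context uses kCGImageAlphaPremultipliedLast, the red, green,
// and blue values read here are premultiplied by alpha; the alpha value itself is unaffected.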
CGFloat red = (CGFloat)pixelData[0] / 255.0f;
CGFloat green = (CGFloat)pixelData[1] / 255.0f;
CGFloat blue = (CGFloat)pixelData[2] / 255.0f;
CGFloat alpha = (CGFloat)pixelData[3] / 255.0f;
return [UIColor colorWithRed:red green:green blue:blue alpha:alpha];
}
@end
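As the doc comment above notes, creating a fresh one-pixel context for every query is wasteful when you need many pixels from the same image. A rough sketch of the cached alternative, not part of the category (the coordinates are placeholders): render the whole image into one RGBA buffer once, then index into it.

// Sketch: cache the full bitmap once, then read any number of pixels from it.
CGImageRef cgImage = image.CGImage;
NSUInteger width = CGImageGetWidth(cgImage);
NSUInteger height = CGImageGetHeight(cgImage);
unsigned char *buffer = calloc(width * height * 4, sizeof(unsigned char));
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef context = CGBitmapContextCreate(buffer, width, height, 8, width * 4, colorSpace,
                                             kCGImageAlphaPremultipliedLast | kCGBitmapByteOrder32Big);
CGColorSpaceRelease(colorSpace);
CGContextDrawImage(context, CGRectMake(0.0f, 0.0f, (CGFloat)width, (CGFloat)height), cgImage);
CGContextRelease(context);

// buffer now holds RGBA data, one row after another, top row first.
NSUInteger x = 10, y = 25;                          // placeholder coordinates
NSUInteger byteIndex = (width * y + x) * 4;
CGFloat alpha = buffer[byteIndex + 3] / 255.0f;
NSLog(@"Alpha at (%lu, %lu): %.2f", (unsigned long)x, (unsigned long)y, alpha);
free(buffer);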
Custom button
#import <UIKit/UIKit.h>

#define kAlphaVisibleThreshold (0.1f)

@interface OBShapedButton : UIButton {
    // The class interface is empty. OBShapedButton only overrides one UIView method
    // and has no instance variables of its own.
}
@end
#import "OBShapedButton.h"
#import "UIImage+ColorAtPixel.h"
@implementation OBShapedButton
// UIView uses this method in hitTest:withEvent: to determine which subview should receive a touch event.
// If pointInside:withEvent: returns YES, then the subview’s hierarchy is traversed; otherwise, its branch
// of the view hierarchy is ignored.
- (BOOL)pointInside:(CGPoint)point withEvent:(UIEvent *)event {
// Bail out right away if super already reports that the point lies outside our bounds
BOOL superResult = [super pointInside:point withEvent:event];
if (!superResult) {
return superResult;
}
// Without a background image there is no alpha channel to test, so accept the touch.
UIImage *buttonImage = [self backgroundImageForState:UIControlStateNormal];
if (buttonImage == nil) {
return YES;
}
// Treat the touch as a hit only if the touched pixel is sufficiently opaque.
UIColor *pixelColor = [buttonImage colorAtPixel:point];
CGFloat alpha = CGColorGetAlpha(pixelColor.CGColor);
return alpha >= kAlphaVisibleThreshold;
}
@end
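Using the button is no different from using a plain UIButton; the only requirement is that the background image has a real alpha channel (e.g., a PNG with transparency), since that is the image pointInside:withEvent: inspects. A minimal sketch, assuming a view controller that adds the button in code; the asset name and buttonTapped: selector are placeholders:

#import "OBShapedButton.h"

// For example, in a view controller's viewDidLoad:
OBShapedButton *button = [[OBShapedButton alloc] initWithFrame:CGRectMake(20.0f, 20.0f, 100.0f, 100.0f)];
// Size the button to match its image: the touch point is passed to colorAtPixel: unchanged,
// so the button's bounds should line up with the image's pixel grid.
[button setBackgroundImage:[UIImage imageNamed:@"arrowButton.png"]
                  forState:UIControlStateNormal];
[button addTarget:self
           action:@selector(buttonTapped:)
 forControlEvents:UIControlEventTouchUpInside];
[self.view addSubview:button];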