iOS Image Blur


Add the CoreImage.framework and CoreGraphics.framework libraries to the project, plus Accelerate.framework for the code below.


Where it is used, import the header: #import <Accelerate/Accelerate.h>. This is supported on iOS 5.0 and later.
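For orientation, the top of the implementation file might look like the sketch below. The class name BlurDemoViewController is an assumption; the _bgImageView ivar simply mirrors its use in the show method that follows.

#import <UIKit/UIKit.h>
#import <Accelerate/Accelerate.h> // vImage buffers and convolution

@interface BlurDemoViewController : UIViewController
{
    UIImageView *_bgImageView; // assumed ivar; -show sets its image
}
@end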



- (void)show
{
    UIImage *img = [self getBlurImage:[UIImage imageNamed:@"Default-568h.png"]];
    [_bgImageView setImage:img];
}


- (UIImage *)getBlurImage:(UIImage *)image
{
    return [self gaussBlur:0.2 andImage:image];
}

The method below does the real work: it re-decodes the image into a raw bitmap, wraps the pixels in vImage buffers, runs a separable Gaussian convolution (one vertical pass, then one horizontal pass), and re-wraps the result as a UIImage.


- (UIImage *)gaussBlur:(CGFloat)blurLevel andImage:(UIImage *)originImage
{
    blurLevel = MIN(1.0, MAX(0.0, blurLevel)); // clamp to [0, 1]

    // The kernel size is hard-coded here; the commented-out line shows how it
    // could be derived from blurLevel instead (it was written for a UIImage
    // category, hence `self.size`).
    //int boxSize = (int)(blurLevel * 0.1 * MIN(self.size.width, self.size.height));
    int boxSize = 50; // blur strength
    boxSize = boxSize - (boxSize % 2) + 1; // force an odd kernel size

    // Re-encode through JPEG so the pixel data ends up in a predictable
    // bitmap layout before it is handed to vImage.
    NSData *imageData = UIImageJPEGRepresentation(originImage, 1);
    UIImage *tmpImage = [UIImage imageWithData:imageData];


    CGImageRef img = tmpImage.CGImage;
    vImage_Buffer inBuffer, outBuffer;
    vImage_Error error;
    void *pixelBuffer;

    // Create a vImage_Buffer pointing at the CGImage's pixel data.
    CGDataProviderRef inProvider = CGImageGetDataProvider(img);
    CFDataRef inBitmapData = CGDataProviderCopyData(inProvider);

    inBuffer.width = CGImageGetWidth(img);
    inBuffer.height = CGImageGetHeight(img);
    inBuffer.rowBytes = CGImageGetBytesPerRow(img);
    inBuffer.data = (void *)CFDataGetBytePtr(inBitmapData);

    // Create a vImage_Buffer for the output, with the same geometry.
    pixelBuffer = malloc(CGImageGetBytesPerRow(img) * CGImageGetHeight(img));

    outBuffer.data = pixelBuffer;
    outBuffer.width = CGImageGetWidth(img);
    outBuffer.height = CGImageGetHeight(img);
    outBuffer.rowBytes = CGImageGetBytesPerRow(img);


    // Build a 1-D Gaussian kernel with sigma = radius / 3, scaled to integer
    // weights; `sum` becomes the divisor that normalizes the result.
    NSInteger windowR = boxSize / 2;
    CGFloat sig2 = windowR / 3.0;
    if (windowR > 0) { sig2 = -1 / (2 * sig2 * sig2); }

    int16_t *kernel = (int16_t *)malloc(boxSize * sizeof(int16_t));
    int32_t sum = 0;
    for (NSInteger i = 0; i < boxSize; ++i) {
        kernel[i] = 255 * exp(sig2 * (i - windowR) * (i - windowR));
        sum += kernel[i];
    }

    // Separable convolution: a vertical pass (boxSize x 1 kernel) followed by
    // a horizontal pass (1 x boxSize). The second pass writes back into
    // inBuffer, i.e. into the CFData's bytes; this works in practice, but a
    // stricter implementation would use a mutable copy.
    error = vImageConvolve_ARGB8888(&inBuffer, &outBuffer, NULL, 0, 0, kernel, boxSize, 1, sum, NULL, kvImageEdgeExtend);
    error = vImageConvolve_ARGB8888(&outBuffer, &inBuffer, NULL, 0, 0, kernel, 1, boxSize, sum, NULL, kvImageEdgeExtend);
    outBuffer = inBuffer;
    free(kernel); // free the kernel only after both convolution passes have used it

    if (error) {
        NSLog(@"error from convolution %ld", error);
    }


    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef ctx = CGBitmapContextCreate(outBuffer.data,
                                             outBuffer.width,
                                             outBuffer.height,
                                             8,
                                             outBuffer.rowBytes,
                                             colorSpace,
                                             kCGImageAlphaNoneSkipLast);
    CGImageRef imageRef = CGBitmapContextCreateImage(ctx);
    UIImage *returnImage = [UIImage imageWithCGImage:imageRef];

    // Clean up. CGBitmapContextCreateImage copies the pixels, so the buffers
    // can be released afterwards.
    CGContextRelease(ctx);
    CGColorSpaceRelease(colorSpace);
    free(pixelBuffer);
    CFRelease(inBitmapData);
    CGImageRelease(imageRef);

    return returnImage;
}
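Putting it together, here is a minimal usage sketch, assuming the view controller owns a full-screen _bgImageView as in the show method above:

- (void)viewDidLoad
{
    [super viewDidLoad];

    // Assumed setup: a full-screen image view that receives the blurred image.
    _bgImageView = [[UIImageView alloc] initWithFrame:self.view.bounds];
    [self.view addSubview:_bgImageView];

    [self show]; // blurs Default-568h.png and displays it
}

Because the convolution is separable, the cost grows linearly with boxSize rather than quadratically, so even the fairly large 51-tap kernel used above stays reasonable.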



