#if defined(__IPHONE_OS_VERSION_MIN_REQUIRED)
/// Decodes (inflates) compressed image data on the calling (background) queue
/// so the resulting UIImage is already decompressed when it reaches the main
/// thread, avoiding a decode stall at first draw.
///
/// @param response The HTTP response; its Content-Type header selects the decoder.
/// @param data     The raw response body. May be nil or empty.
/// @param scale    The screen scale to apply to the resulting image.
///
/// @return A pre-drawn UIImage, a non-inflated fallback image when the bitmap
///         context cannot be created, or nil when the data is empty/undecodable.
static UIImage * AFInflatedImageFromResponseWithDataAtScale(NSHTTPURLResponse *response, NSData *data, CGFloat scale) {
    if (!data || [data length] == 0) {
        return nil;
    }

    CGImageRef imageRef = NULL;
    CGDataProviderRef dataProvider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);

    NSSet *contentTypes = AFContentTypesFromHTTPHeader([[response allHeaderFields] valueForKey:@"Content-Type"]);
    if ([contentTypes containsObject:@"image/png"]) {
        imageRef = CGImageCreateWithPNGDataProvider(dataProvider, NULL, true, kCGRenderingIntentDefault);
    } else if ([contentTypes containsObject:@"image/jpeg"]) {
        imageRef = CGImageCreateWithJPEGDataProvider(dataProvider, NULL, true, kCGRenderingIntentDefault);
    } else {
        UIImage *image = [[UIImage alloc] initWithData:data scale:scale];
        if (image.images) {
            // Animated images cannot be flattened into a single bitmap
            // context; return them as decoded by UIImage.
            CGDataProviderRelease(dataProvider);
            return image;
        }

        imageRef = CGImageCreateCopy([image CGImage]);
    }

    CGDataProviderRelease(dataProvider);

    if (!imageRef) {
        return nil;
    }

    size_t width = CGImageGetWidth(imageRef);
    size_t height = CGImageGetHeight(imageRef);
    size_t bitsPerComponent = CGImageGetBitsPerComponent(imageRef);

    // CGBitmapContextCreate supports at most 8 bits per component with the
    // device RGB color space; fall back to the system decoder for deep images
    // instead of failing context creation below.
    if (bitsPerComponent > 8) {
        CGImageRelease(imageRef);

        return [[UIImage alloc] initWithData:data scale:scale];
    }

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGBitmapInfo bitmapInfo = CGImageGetBitmapInfo(imageRef);

    // Normalize the alpha info to a combination CGBitmapContextCreate accepts
    // for an RGB destination (kCGImageAlphaNone and kCGImageAlphaLast are not
    // supported there).
    CGImageAlphaInfo alpha = (CGImageAlphaInfo)(bitmapInfo & kCGBitmapAlphaInfoMask);
    if (alpha == kCGImageAlphaNone && CGColorSpaceGetNumberOfComponents(colorSpace) == 3) {
        bitmapInfo &= ~kCGBitmapAlphaInfoMask;
        bitmapInfo |= kCGImageAlphaNoneSkipFirst;
    } else if (!(alpha == kCGImageAlphaNoneSkipFirst || alpha == kCGImageAlphaNoneSkipLast) && CGColorSpaceGetNumberOfComponents(colorSpace) == 3) {
        bitmapInfo &= ~kCGBitmapAlphaInfoMask;
        bitmapInfo |= kCGImageAlphaPremultipliedFirst;
    }

    // Pass 0 for bytesPerRow so Core Graphics computes a stride matching the
    // (possibly adjusted) bitmapInfo. Reusing the source image's
    // bytes-per-row can make CGBitmapContextCreate fail once the alpha info
    // has been rewritten above, since the source stride may not accommodate
    // the added alpha channel.
    CGContextRef context = CGBitmapContextCreate(NULL, width, height, bitsPerComponent, 0, colorSpace, bitmapInfo);

    CGColorSpaceRelease(colorSpace);

    if (!context) {
        CGImageRelease(imageRef);

        // Could not build a drawing context for this pixel format; return a
        // non-inflated image rather than nothing.
        return [[UIImage alloc] initWithData:data scale:scale];
    }

    CGRect rect = CGRectMake(0.0f, 0.0f, width, height);
    CGContextDrawImage(context, rect, imageRef);
    CGImageRef inflatedImageRef = CGBitmapContextCreateImage(context);
    CGContextRelease(context);

    UIImage *inflatedImage = [[UIImage alloc] initWithCGImage:inflatedImageRef scale:scale orientation:UIImageOrientationUp];
    CGImageRelease(inflatedImageRef);
    CGImageRelease(imageRef);

    return inflatedImage;
}
#endif

#if defined(__IPHONE_OS_VERSION_MIN_REQUIRED)
/// Lazily decodes the response data into a pre-inflated image the first time
/// it is requested after the operation finishes. Cached in _responseImage.
- (UIImage *)responseImage {
    if (!_responseImage && [self.responseData length] > 0 && [self isFinished]) {
        self.responseImage = AFInflatedImageFromResponseWithDataAtScale(self.response, self.responseData, self.imageScale);
    }

    return _responseImage;
}
#endif