Inflate (decompress) image data in the background, as a part of AFImageRequestOperation -responseImage (when targeting iOS)

Mattt Thompson 2013-06-16 22:38:36 -07:00
parent e89229c557
commit 0dbf61f835


@@ -32,6 +32,76 @@ static dispatch_queue_t image_request_operation_processing_queue() {
    return af_image_request_operation_processing_queue;
}
#if defined(__IPHONE_OS_VERSION_MIN_REQUIRED)
static UIImage * AFInflatedImageFromResponseWithDataAtScale(NSHTTPURLResponse *response, NSData *data, CGFloat scale) {
    if (!data || [data length] == 0) {
        return nil;
    }

    CGImageRef imageRef;
    CGDataProviderRef dataProvider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);

    // Decode PNG or JPEG data directly when the Content-Type header identifies the format;
    // otherwise fall back to UIImage and copy its backing CGImage.
    NSSet *contentTypes = AFContentTypesFromHTTPHeader([[response allHeaderFields] valueForKey:@"Content-Type"]);
    if ([contentTypes containsObject:@"image/png"]) {
        imageRef = CGImageCreateWithPNGDataProvider(dataProvider, NULL, true, kCGRenderingIntentDefault);
    } else if ([contentTypes containsObject:@"image/jpeg"]) {
        imageRef = CGImageCreateWithJPEGDataProvider(dataProvider, NULL, true, kCGRenderingIntentDefault);
    } else {
        UIImage *image = [[UIImage alloc] initWithData:data scale:scale];
        if (image.images) {
            // Animated images can't be flattened into a single bitmap; return them as-is.
            CGDataProviderRelease(dataProvider);

            return image;
        }

        imageRef = CGImageCreateCopy([image CGImage]);
    }

    CGDataProviderRelease(dataProvider);

    if (!imageRef) {
        return nil;
    }

    size_t width = CGImageGetWidth(imageRef);
    size_t height = CGImageGetHeight(imageRef);
    size_t bitsPerComponent = CGImageGetBitsPerComponent(imageRef);
    size_t bytesPerRow = CGImageGetBytesPerRow(imageRef);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGBitmapInfo bitmapInfo = CGImageGetBitmapInfo(imageRef);

    // CGBitmapContextCreate doesn't support every alpha configuration for RGB,
    // so substitute a compatible alpha setting before creating the context.
    int alpha = (bitmapInfo & kCGBitmapAlphaInfoMask);
    if (alpha == kCGImageAlphaNone && CGColorSpaceGetNumberOfComponents(colorSpace) == 3) {
        bitmapInfo &= ~kCGBitmapAlphaInfoMask;
        bitmapInfo |= kCGImageAlphaNoneSkipFirst;
    } else if (!(alpha == kCGImageAlphaNoneSkipFirst || alpha == kCGImageAlphaNoneSkipLast) && CGColorSpaceGetNumberOfComponents(colorSpace) == 3) {
        bitmapInfo &= ~kCGBitmapAlphaInfoMask;
        bitmapInfo |= kCGImageAlphaPremultipliedFirst;
    }

    CGContextRef context = CGBitmapContextCreate(NULL, width, height, bitsPerComponent, bytesPerRow, colorSpace, bitmapInfo);

    CGColorSpaceRelease(colorSpace);

    if (!context) {
        // If a bitmap context can't be created, fall back to a lazily-decoded image.
        CGImageRelease(imageRef);

        return [[UIImage alloc] initWithData:data scale:scale];
    }

    // Drawing into the bitmap context is what forces decompression here, on the
    // processing queue, instead of lazily on the main thread at first display.
    CGRect rect = CGRectMake(0.0f, 0.0f, width, height);
    CGContextDrawImage(context, rect, imageRef);
    CGImageRef inflatedImageRef = CGBitmapContextCreateImage(context);

    CGContextRelease(context);

    UIImage *inflatedImage = [[UIImage alloc] initWithCGImage:inflatedImageRef scale:scale orientation:UIImageOrientationUp];

    CGImageRelease(inflatedImageRef);
    CGImageRelease(imageRef);

    return inflatedImage;
}
#endif
@interface AFImageRequestOperation ()
#if defined(__IPHONE_OS_VERSION_MIN_REQUIRED)
@property (readwrite, nonatomic, strong) UIImage *responseImage;
@@ -151,9 +221,7 @@ static dispatch_queue_t image_request_operation_processing_queue() {
#if defined(__IPHONE_OS_VERSION_MIN_REQUIRED)
- (UIImage *)responseImage {
    if (!_responseImage && [self.responseData length] > 0 && [self isFinished]) {
-        UIImage *image = [UIImage imageWithData:self.responseData];
-        self.responseImage = [UIImage imageWithCGImage:[image CGImage] scale:self.imageScale orientation:image.imageOrientation];
+        self.responseImage = AFInflatedImageFromResponseWithDataAtScale(self.response, self.responseData, self.imageScale);
    }

    return _responseImage;
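
As an illustrative sketch of what this buys a caller: the image delivered to the success block is built from -responseImage, so it has already been drawn into a bitmap on the processing queue rather than being decompressed lazily on the main thread at first display. The surrounding method, the image view, and the URL below are placeholders; the sketch assumes AFNetworking 1.x's +imageRequestOperationWithRequest:success: convenience constructor.

#import <UIKit/UIKit.h>
#import "AFImageRequestOperation.h"

// Hypothetical helper, intended to live in a view controller's @implementation.
- (void)loadPhotoIntoImageView:(UIImageView *)imageView {
    NSURL *url = [NSURL URLWithString:@"https://example.com/photo.jpg"]; // placeholder URL
    NSURLRequest *request = [NSURLRequest requestWithURL:url];

    AFImageRequestOperation *operation =
        [AFImageRequestOperation imageRequestOperationWithRequest:request success:^(UIImage *image) {
            // `image` is the pre-inflated responseImage, so assigning it here does
            // not trigger a decompression pass on the main thread.
            imageView.image = image;
        }];

    [operation start];
}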