iOS Question

CGBitmapContextCreateImage memory leak

This line of code, which uses CGBitmapContextCreateImage from the Quartz framework, creates a major memory leak:

    CGImageRef imgRef = CGBitmapContextCreateImage(bitmapContext);


Is there an alternative method or piece of code I can use instead?
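
As far as I understand it, CGBitmapContextCreateImage follows Core Foundation's Create rule, so the image it returns is owned by the caller and has to be balanced with a release, roughly:

    CGImageRef imgRef = CGBitmapContextCreateImage(bitmapContext);
    // ... use imgRef, e.g. write it to disk ...
    CGImageRelease(imgRef); // the caller owns the image returned by a *Create* function

The full method below does balance it (with CFRelease(imgRef)), yet this allocation is still what leaks.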

The entire method is as follows:

    - (void)captureImageWithCompletionHander:(void(^)(NSString *imageFilePath))completionHandler
    {
        dispatch_suspend(_captureQueue);
        AVCaptureConnection *videoConnection = nil;
        for (AVCaptureConnection *connection in self.stillImageOutput.connections)
        {
            for (AVCaptureInputPort *port in [connection inputPorts])
            {
                if ([[port mediaType] isEqual:AVMediaTypeVideo])
                {
                    videoConnection = connection;
                    break;
                }
            }
            if (videoConnection) break;
        }
        __weak typeof(self) weakSelf = self;
        [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error)
        {
            if (error)
            {
                dispatch_resume(_captureQueue);
                return;
            }
            __block NSString *filePath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"Image 1"];
            @autoreleasepool
            {
                NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
                CIImage *ciImage = [[CIImage alloc] initWithData:imageData options:@{kCIImageColorSpace: [NSNull null]}];
                enhancedImage = [self filteredImageUsingContrastFilterOnImage:ciImage];
                if (weakSelf.isBorderDetectionEnabled && rectangleDetectionConfidenceHighEnough(_imageDedectionConfidence))
                {
                    CIRectangleFeature *rectangleFeature = [self biggestRectangleInRectangles:[[self highAccuracyRectangleDetector] featuresInImage:enhancedImage]];
                    if (rectangleFeature)
                    {
                        [self correctPerspectiveForImageWithFeatures:rectangleFeature];
                    }
                }
                CIFilter *transform = [CIFilter filterWithName:@"CIAffineTransform"];
                [transform setValue:enhancedImage forKey:kCIInputImageKey];
                NSValue *rotation = [NSValue valueWithCGAffineTransform:CGAffineTransformMakeRotation(-90 * (M_PI / 180))];
                [transform setValue:rotation forKey:@"inputTransform"];
                enhancedImage = [transform outputImage];
                if (!enhancedImage || CGRectIsEmpty(enhancedImage.extent)) return;
                static CIContext *ctx = nil;
                if (!ctx)
                {
                    ctx = [CIContext contextWithOptions:@{kCIContextWorkingColorSpace: [NSNull null]}];
                }
                CGSize bounds = enhancedImage.extent.size;
                bounds = CGSizeMake(floorf(bounds.width / 4) * 4, floorf(bounds.height / 4) * 4);
                CGRect extent = CGRectMake(enhancedImage.extent.origin.x, enhancedImage.extent.origin.y, bounds.width, bounds.height);
                static int bytesPerPixel = 8;
                uint rowBytes = bytesPerPixel * bounds.width;
                uint totalBytes = rowBytes * bounds.height;
                uint8_t *byteBuffer = (uint8_t *)malloc(totalBytes);
                CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
                [ctx render:enhancedImage toBitmap:byteBuffer rowBytes:rowBytes bounds:extent format:kCIFormatRGBA8 colorSpace:colorSpace];
                CGContextRef bitmapContext = CGBitmapContextCreate(byteBuffer, bounds.width, bounds.height, bytesPerPixel, rowBytes, colorSpace, kCGImageAlphaNoneSkipLast);
                free(byteBuffer);

                CGImageRef imgRef = CGBitmapContextCreateImage(bitmapContext);

                CGContextRelease(bitmapContext);
                CGColorSpaceRelease(colorSpace);
                saveCGImageAsJPEGToFilePath(imgRef, filePath);
                CFRelease(imgRef);
                dispatch_async(dispatch_get_main_queue(), ^
                {
                    completionHandler(filePath);
                    dispatch_resume(_captureQueue);
                });
                _imageDedectionConfidence = 0.0f;
            }
        }];
    }

Answer

I fixed the issue by making the code a lot leaner and removing some unnecessary portions, including the leaking line of code above.
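
The core of the change is rendering the CIImage straight to a CGImage with CIContext's createCGImage:fromRect:, which removes the malloc'd byte buffer, the CGBitmapContextCreate call, and the leaking CGBitmapContextCreateImage call entirely. Roughly, using the same enhancedImage, filePath, and saveCGImageAsJPEGToFilePath as in the method below:

    static CIContext *ctx = nil;
    if (!ctx)
    {
        ctx = [CIContext contextWithOptions:@{kCIContextWorkingColorSpace: [NSNull null]}];
    }
    CGImageRef imgRef = [ctx createCGImage:enhancedImage fromRect:enhancedImage.extent];
    saveCGImageAsJPEGToFilePath(imgRef, filePath);
    CFRelease(imgRef); // createCGImage:fromRect: also follows the Create rule

The rewritten method: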

    - (void)captureImageWithCompletionHander:(void(^)(NSString *imageFilePath))completionHandler
    {
        dispatch_suspend(_captureQueue);
        AVCaptureConnection *videoConnection = nil;
        for (AVCaptureConnection *connection in self.stillImageOutput.connections)
        {
            for (AVCaptureInputPort *port in [connection inputPorts])
            {
                if ([[port mediaType] isEqual:AVMediaTypeVideo])
                {
                    videoConnection = connection;
                    break;
                }
            }
            if (videoConnection) break;
        }
        __weak typeof(self) weakSelf = self;
        [self.stillImageOutput captureStillImageAsynchronouslyFromConnection:videoConnection completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error)
        {
            if (error)
            {
                dispatch_resume(_captureQueue);
                return;
            }
            __block NSString *filePath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"Image 1"];
            @autoreleasepool
            {
                NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageSampleBuffer];
                UIImage *img = [UIImage imageWithData:imageData];
                imageData = nil;
                CGSize newSize = CGSizeMake(img.size.width, img.size.height);
                UIGraphicsBeginImageContext(newSize);
                [img drawInRect:CGRectMake(0, 0, newSize.width, newSize.height)];
                UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
                UIGraphicsEndImageContext();
                CIImage *ciImage = [[CIImage alloc] initWithImage:newImage];
                enhancedImage = [self filteredImageUsingContrastFilterOnImage:ciImage];
                if (weakSelf.isBorderDetectionEnabled && rectangleDetectionConfidenceHighEnough(_imageDedectionConfidence))
                {
                    CIRectangleFeature *rectangleFeature = [self biggestRectangleInRectangles:[[self highAccuracyRectangleDetector] featuresInImage:enhancedImage]];
                    if (rectangleFeature)
                    {
                        enhancedImage = [self correctPerspectiveForImage:enhancedImage withFeatures:rectangleFeature];
                    }
                }
                if (!enhancedImage || CGRectIsEmpty(enhancedImage.extent)) return;
                CGRect rect = [enhancedImage extent];
                static CIContext *ctx = nil; // reuse one CIContext across captures
                if (!ctx)
                {
                    ctx = [CIContext contextWithOptions:@{kCIContextWorkingColorSpace: [NSNull null]}];
                }
                CGImageRef imgRef = [ctx createCGImage:enhancedImage fromRect:rect];
                saveCGImageAsJPEGToFilePath(imgRef, filePath);
                CFRelease(imgRef);
                dispatch_async(dispatch_get_main_queue(), ^
                {
                    completionHandler(filePath);
                    dispatch_resume(_captureQueue);
                });
                _imageDedectionConfidence = 0.0f;
            }
        }];
    }
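
One note on the design: the if (!ctx) check is there so that a single CIContext is created once and reused for every capture; Apple's Core Image documentation recommends reusing contexts because creating one is relatively expensive.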