2012-06-26 1 views
2

일부 기본 검색을 수행하는 iOS 앱을 구축하고 있습니다. AVCaptureVideoDataOutput에서 원시 프레임을 가져 와서 CMSampleBufferRef를 UIImage로 변환하고 UIImage의 크기를 조정 한 다음 CVPixelBufferRef로 변환합니다. Instruments에서 탐지 할 수있는 한, 누출 부분은 CGImage를 CVPixelBufferRef로 변환하는 마지막 부분입니다. CoreImage/CoreVideo에서 메모리 누수가 발생했습니다.

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection 
{ 
    videof = [[ASMotionDetect alloc] initWithSampleImage:[self resizeSampleBuffer:sampleBuffer]]; 
    // ASMotionDetect is my class for detection and I use videof to calculate the movement 
} 

-(UIImage*)resizeSampleBuffer:(CMSampleBufferRef) sampleBuffer { 
    UIImage *img; 
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); 
    CVPixelBufferLockBaseAddress(imageBuffer,0);  // Lock the image buffer 

    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0); // Get information of the image 
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer); 
    size_t width = CVPixelBufferGetWidth(imageBuffer); 
    size_t height = CVPixelBufferGetHeight(imageBuffer); 
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); 

    CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst); 
    CGImageRef newImage = CGBitmapContextCreateImage(newContext); 
    CGContextRelease(newContext); 

    CGColorSpaceRelease(colorSpace); 
    CVPixelBufferUnlockBaseAddress(imageBuffer,0); 
    /* CVBufferRelease(imageBuffer); */ // do not call this! 

    img = [UIImage imageWithCGImage:newImage]; 
    CGImageRelease(newImage); 
    newContext = nil; 
    img = [self resizeImageToSquare:img]; 
    return img; 
} 

-(UIImage*)resizeImageToSquare:(UIImage*)_temp { 
    UIImage *img; 
    int w = _temp.size.width; 
    int h = _temp.size.height; 
    CGRect rect; 
    if (w>h) { 
     rect = CGRectMake((w-h)/2,0,h,h); 
    } else { 
     rect = CGRectMake(0, (h-w)/2, w, w); 
    } 
    // 
    img = [self crop:_temp inRect:rect]; 
    return img; 
} 

-(UIImage*) crop:(UIImage*)image inRect:(CGRect)rect{ 
    UIImage *sourceImage = image; 
    CGRect selectionRect = rect; 
    CGRect transformedRect = TransformCGRectForUIImageOrientation(selectionRect, sourceImage.imageOrientation, sourceImage.size); 
    CGImageRef resultImageRef = CGImageCreateWithImageInRect(sourceImage.CGImage, transformedRect); 
    UIImage *resultImage = [[UIImage alloc] initWithCGImage:resultImageRef scale:1.0 orientation:image.imageOrientation]; 
    CGImageRelease(resultImageRef); 
    return resultImage; 
} 

그리고 내 탐지 클래스의

내가 가진 : 여기

내가 사용하는 코드의 악기와

- (id)initWithSampleImage:(UIImage*)sampleImage { 
    if ((self = [super init])) { 
    _frame = new CVMatOpaque(); 
    _histograms = new CVMatNDOpaque[kGridSize * 
            kGridSize]; 
    [self extractFrameFromImage:sampleImage]; 
    } 
    return self; 
} 

- (void)extractFrameFromImage:(UIImage*)sampleImage { 
    CGImageRef imageRef = [sampleImage CGImage]; 
    CVImageBufferRef imageBuffer = [self pixelBufferFromCGImage:imageRef]; 
    CVPixelBufferLockBaseAddress(imageBuffer, 0); 
    // Collect some information required to extract the frame. 
    void *baseAddress = CVPixelBufferGetBaseAddress(imageBuffer); 
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer); 
    size_t height = CVPixelBufferGetHeight(imageBuffer); 
    size_t width = CVPixelBufferGetWidth(imageBuffer); 

    // Extract the frame, convert it to grayscale, and shove it in _frame. 
    cv::Mat frame(height, width, CV_8UC4, baseAddress, bytesPerRow); 
    cv::cvtColor(frame, frame, CV_BGR2GRAY); 
    _frame->matrix = frame; 
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0); 
    CGImageRelease(imageRef); 
} 

- (CVPixelBufferRef) pixelBufferFromCGImage: (CGImageRef) image 
{ 
    CVPixelBufferRef pxbuffer = NULL; 
    int width = CGImageGetWidth(image)*2; 
    int height = CGImageGetHeight(image)*2; 

    NSMutableDictionary *attributes = [NSMutableDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, [NSNumber numberWithInt:width], kCVPixelBufferWidthKey, [NSNumber numberWithInt:height], kCVPixelBufferHeightKey, nil]; 
    CVPixelBufferPoolRef pixelBufferPool; 
    CVReturn theError = CVPixelBufferPoolCreate(kCFAllocatorDefault, NULL, (__bridge CFDictionaryRef) attributes, &pixelBufferPool); 
    NSParameterAssert(theError == kCVReturnSuccess); 
    CVReturn status = CVPixelBufferPoolCreatePixelBuffer(NULL, pixelBufferPool, &pxbuffer); 
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL); 

    CVPixelBufferLockBaseAddress(pxbuffer, 0); 
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer); 
    NSParameterAssert(pxdata != NULL); 
    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB(); 
    CGContextRef context = CGBitmapContextCreate(pxdata, width, 
               height, 8, width*4, rgbColorSpace, 
               kCGImageAlphaNoneSkipFirst); 
    NSParameterAssert(context); 
/* here is the problem: */ 
    CGContextDrawImage(context, CGRectMake(0, 0, width, height), image); 
    CGColorSpaceRelease(rgbColorSpace); 
    CGContextRelease(context); 

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0); 

    return pxbuffer; 
} 

나는 문제가 CVPixelBufferRef 할당에 있음을 발견하지만 난 이해가 안 돼요 왜 - 누군가가 문제를 볼 수 있습니까?

모두 pxBufferpixelBufferPool이 해제되지 않은, -pixelBufferFromCGImage:에서는

답변

1

감사드립니다. pxBuffer은 반환 값이므로 의미가 있지만 pixelBufferPool은 그렇지 않습니다. - 메서드 호출 당 하나씩 만들고 누출됩니다.

빠른 수정은 또한 명확하게하기 위해 -pixelBufferFromCGImage:-createPixelBufferFromCGImage:에 이름을 변경해야 -extractFrameFromImage:

-pixelBufferFromCGImage:

  • 출시 pxBuffer (-pixelBufferFromCGImage:의 반환 값)에

    1. 출시 pixelBufferPool에 있어야한다 그것은 유지 된 객체를 반환한다는 것입니다.

  • +0

    CGContextDrawImage (CGRectMake (0, 0, width, height), image)에서 여전히 오류가 발생합니다. – tagyro

    +0

    @AndreiStoleru 코드를 현재 버전으로 업데이트하고 누수에 대한 세부 정보를 제공 할 수 있습니까? 지금 유출 됐어? –

    +0

    이제는 apple에서 제공하는 샘플 코드를 사용하여 CMSampleBufferRef에서 UIImage로 변환했지만 결과 이미지의 방향이 잘못되었습니다 (http://stackoverflow.com/q/11246726/401087) – tagyro