2013-04-10 1 views
1

AVCaptureScreenInput을 사용하여 화면을 샘플링하고 AVCaptureVideoDataOutput을 사용하여 출력하는 중입니다. 그러나 작동하지 않습니다. 출력되는 이미지는 비어 있지만, 제가 읽은 모든 문서에 따르면 필요한 작업은 모두 수행하고 있는 것처럼 보입니다. — AVCaptureScreenInput에서 빈 스틸 캡처?

내가 AVCaptureVideoDataOutputCGImage(kCVPixelFormatType_32BGRA)으로 읽을 수있는 것으로 만들었습니다. 이 코드를 실행하여 AVCaptureMovieFileOutput으로 출력하면 영화가 잘 렌더링되고 모든 것이 멋지게 보이지만 실제로 원하는 것은 일련의 이미지입니다.

#import "ScreenRecorder.h" 
#import <QuartzCore/QuartzCore.h> 

// Private class extension: internal recording state (manual retain/release code).
// NOTE(review): AVCaptureFileOutputRecordingDelegate is declared but no method of
// that protocol is visible in this file; only the sample-buffer delegate is used.
@interface ScreenRecorder() <AVCaptureFileOutputRecordingDelegate, AVCaptureVideoDataOutputSampleBufferDelegate> { 
    BOOL _isRecording;   // YES while a capture session is active
@private 
    AVCaptureSession *_session;                  // owning reference (released in -stopRecording / -dealloc)
    AVCaptureOutput *_movieFileOutput;           // actually an AVCaptureVideoDataOutput; retained by _session, not by this ivar
    AVCaptureStillImageOutput *_imageFileOutput; // never assigned anywhere in this file
 
    NSUInteger _frameIndex;                      // sequential index used to name the output JPEG files
 
    NSTimer *_timer;                             // optional auto-stop timer (retained)
 
    NSString *_outputDirectory;                  // retained path of the directory receiving the frames
} 
@end 

@implementation ScreenRecorder 

// Starts capturing the contents of displayId, cropped to windowBounds, writing
// one JPEG per delivered frame into a directory created at fileURL's path.
//
// displayId    - the display to capture.
// fileURL      - file URL whose path becomes the output *directory* (any
//                pre-existing item at that path is removed first).
// windowBounds - crop rectangle applied to the screen input.
// duration     - seconds after which recording stops automatically; pass 0 to
//                record until -stopRecording is called.
//
// Returns YES when the session was started, NO when already recording or the
// display input could not be created/added.
- (BOOL)recordDisplayImages:(CGDirectDisplayID)displayId toURL:(NSURL *)fileURL windowBounds:(CGRect)windowBounds duration:(NSTimeInterval)duration { 
    if (_isRecording) { 
        return NO; 
    } 

    _frameIndex = 0; 

    // Create the capture session. 
    _session = [[AVCaptureSession alloc] init]; 
    _session.sessionPreset = AVCaptureSessionPresetHigh; 

    // Create a screen input for the display and add it to the session. 
    AVCaptureScreenInput *input = [[[AVCaptureScreenInput alloc] initWithDisplayID:displayId] autorelease]; 
    if (!input) { 
        [_session release]; 
        _session = nil; 
        return NO; 
    } 
    // FIX: the old code ignored a rejected input and started a session with no
    // video source; fail fast instead so the caller sees the error.
    if (![_session canAddInput:input]) { 
        [_session release]; 
        _session = nil; 
        return NO; 
    } 
    [_session addInput:input]; 
    input.cropRect = windowBounds; 

    // Create a video-data output delivering interleaved BGRA frames. The
    // session retains the output; the ivar is a non-owning reference.
    _movieFileOutput = [[[AVCaptureVideoDataOutput alloc] init] autorelease]; 
    [((AVCaptureVideoDataOutput *)_movieFileOutput) setVideoSettings:[NSDictionary dictionaryWithObjectsAndKeys:@(kCVPixelFormatType_32BGRA), kCVPixelBufferPixelFormatTypeKey, nil]]; 

    if ([_session canAddOutput:_movieFileOutput]) { 
        [_session addOutput:_movieFileOutput]; 
    } 

    // Register the sample-buffer delegate BEFORE starting the session so the
    // first frames are not silently dropped.
    dispatch_queue_t queue = dispatch_queue_create("com.schaefer.lolz", 0); 
    [(AVCaptureVideoDataOutput *)_movieFileOutput setSampleBufferDelegate:self queue:queue]; 
    // FIX: the output retains its delegate queue, so we must balance our
    // dispatch_queue_create; the commented-out release leaked one queue per
    // recording (this is MRC code, GCD objects need an explicit release here).
    dispatch_release(queue); 

    // Remove any existing item at the destination, then (re)create it as a
    // directory that will hold the numbered JPEG frames.
    if ([[NSFileManager defaultManager] fileExistsAtPath:[fileURL path]]) { 
        NSError *err; 
        if (![[NSFileManager defaultManager] removeItemAtPath:[fileURL path] error:&err]) { 
            NSLog(@"Error deleting existing movie %@", [err localizedDescription]); 
        } 
    } 

    // FIX: release any previous value before retaining the new one — the old
    // code leaked _outputDirectory on every recording after the first.
    [_outputDirectory release]; 
    _outputDirectory = [[fileURL path] retain]; 
    [[NSFileManager defaultManager] createDirectoryAtPath:_outputDirectory withIntermediateDirectories:YES attributes:nil error:nil]; 

    // Start capturing now that input, output and delegate are all in place.
    [_session startRunning]; 

    if (0 != duration) { 
        _timer = [[NSTimer scheduledTimerWithTimeInterval:duration target:self selector:@selector(finishRecord:) userInfo:nil repeats:NO] retain]; 
    } 
    _isRecording = YES; 

    return _isRecording; 
} 

// MRC teardown: stop the session and release everything this object owns.
- (void)dealloc 
{ 
    // FIX: a scheduled NSTimer is retained by the run loop (and retains its
    // target); it must be invalidated, not merely released — the old code
    // never touched _timer here at all.
    [_timer invalidate]; 
    [_timer release]; 
    _timer = nil; 

    if (nil != _session) { 
        [_session stopRunning]; 
        [_session release]; 
    } 

    [_outputDirectory release]; 
    _outputDirectory = nil; 

    [super dealloc]; 
} 

// Stops an in-progress recording: detaches the frame delegate, tears down the
// capture session and cancels the auto-stop timer. Safe to call when not
// recording (no-op). Invoked either by the caller or by the duration timer.
- (void)stopRecording { 
    if (!_isRecording) { 
        return; 
    } 
    _isRecording = NO; 

    // Detach the delegate so no further -captureOutput:... callbacks arrive
    // while the session is being torn down. (The output here is always the
    // AVCaptureVideoDataOutput created in -recordDisplayImages:...; the old
    // AVCaptureFileOutput/performSelector branch was unreachable dead code.)
    [(AVCaptureVideoDataOutput *)_movieFileOutput setSampleBufferDelegate:nil queue:NULL]; 

    [_session stopRunning]; 

    [_session release]; 
    _session = nil; 
    // The session owned the output; clear our non-owning reference.
    _movieFileOutput = nil; 

    // FIX: invalidate before releasing — releasing a scheduled timer without
    // invalidating leaves it registered with the run loop, which still retains
    // it (and its target) until it fires.
    [_timer invalidate]; 
    [_timer release]; 
    _timer = nil; 
} 

// NSTimer callback fired once when the requested recording duration elapses;
// simply forwards to -stopRecording (the timer argument is unused).
-(void)finishRecord:(NSTimer *)timer 
{ 
    [self stopRecording]; 
} 

//AVCaptureVideoDataOutputSampleBufferDelegate 
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection { 
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer); 
    CVPixelBufferLockBaseAddress(imageBuffer,0);  // Lock the image buffer 

    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0); // Get information of the image 
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer); 
    size_t width = CVPixelBufferGetWidth(imageBuffer); 
    size_t height = CVPixelBufferGetHeight(imageBuffer); 
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB(); 

    CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst); 
    CGImageRef image = CGBitmapContextCreateImage(newContext); 
    CGContextRelease(newContext); 

    CGColorSpaceRelease(colorSpace); 
    _frameIndex++;  
    CVPixelBufferUnlockBaseAddress(imageBuffer,0); 

    dispatch_async(dispatch_get_main_queue(), ^{ 
     NSURL *URL = [NSURL fileURLWithPath:[_outputDirectory stringByAppendingPathComponent:[NSString stringWithFormat:@"%d.jpg", (int)_frameIndex]]]; 

     CGImageDestinationRef destination = CGImageDestinationCreateWithURL((CFURLRef)URL, kUTTypeJPEG, 1, NULL); 
     CGImageDestinationAddImage(destination, image, nil); 

     if (!CGImageDestinationFinalize(destination)) { 
      NSLog(@"Failed to write image to %@", URL); 
     } 

     CFRelease(destination); 
     CFRelease(image); 
    }); 
} 
@end 

답변

0

귀하의 데이터는 평면(planar) 형식이 아니므로 평면 0에 대한 기본 주소가 존재하지 않습니다(CVPixelBufferIsPlanar로 확인할 수 있습니다). 첫 번째 픽셀에 대한 포인터를 얻으려면 CVPixelBufferGetBaseAddress를 사용해야 합니다. 모든 데이터가 인터리브되어 있습니다.

+0

감사합니다. 그러나 이것은 여전히 ​​나에게 일하는 이미지를주지 않습니다. 내 CGBitmapImageContext에 문제가 있습니까? –

+0

에 캡처를 위해 사용하는 픽셀 포맷이 주어지면'kCGImageAlphaPremultipliedFirst'가 아닌'kCGImageAlphaPremultipliedLast'가 필요할 것입니다. –