AVCaptureSession을 사용하여 내 iOS 앱에서 이미지를 캡처하고 싶습니다. Apple의 AVCam 샘플 코드 https://developer.apple.com/library/ios/samplecode/AVCam/Introduction/Intro.html을 기반으로 이미지를 캡처 할 때마다 미리보기 ImageView를 표시하려고했습니다. processImage를 캡처 한 후AVCaptureStillImageOutput에서 마지막으로 캡쳐 한 이미지 미리보기
// Capture a still image.
[[self stillImageOutput] captureStillImageAsynchronouslyFromConnection:[[self stillImageOutput] connectionWithMediaType:AVMediaTypeVideo] completionHandler:^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
if (imageDataSampleBuffer)
{
NSData *imageData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
[self processImage:[UIImage imageWithData:imageData]];
}
}];
는
- (void) processImage:(UIImage *)image
{
[[self postButton] setEnabled:YES];
[[self postButton] setHidden:NO];
[[self cancelButton] setEnabled:YES];
[[self cancelButton] setHidden:NO];
_preview = [[UIImageView alloc] init];
[_preview setImage:image];
_preview.hidden = NO;
}
라고하지만가 표시 될 때 이미지 뷰가 비어/아직 변경되지 않습니다.
누군가 나를 따라갈 수 있습니까? 나를
AVCaptureConnection *connection = [_currentOutput connectionWithMediaType:AVMediaTypeVideo];
[self _setOrientationForConnection:connection];
[_captureOutputPhoto captureStillImageAsynchronouslyFromConnection:connection completionHandler:
^(CMSampleBufferRef imageDataSampleBuffer, NSError *error) {
if (!imageDataSampleBuffer) {
DLog(@"failed to obtain image data sample buffer");
// return delegate error
return;
}
if (error) {
if ([_delegate respondsToSelector:@selector(vision:capturedPhoto:error:)]) {
[_delegate vision:self capturedPhoto:nil error:error];
}
return;
}
NSMutableDictionary *photoDict = [[NSMutableDictionary alloc] init];
NSDictionary *metadata = nil;
// add photo metadata (ie EXIF: Aperture, Brightness, Exposure, FocalLength, etc)
metadata = (__bridge NSDictionary *)CMCopyDictionaryOfAttachments(kCFAllocatorDefault, imageDataSampleBuffer, kCMAttachmentMode_ShouldPropagate);
if (metadata) {
[photoDict setObject:metadata forKey:PBJVisionPhotoMetadataKey];
CFRelease((__bridge CFTypeRef)(metadata));
} else {
DLog(@"failed to generate metadata for photo");
}
// add JPEG and image data
NSData *jpegData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:imageDataSampleBuffer];
if (jpegData) {
// add JPEG
[photoDict setObject:jpegData forKey:PBJVisionPhotoJPEGKey];
// add image
UIImage *image = [self _uiimageFromJPEGData:jpegData];
if (image) {
[photoDict setObject:image forKey:PBJVisionPhotoImageKey];
} else {
DLog(@"failed to create image from JPEG");
// TODO: return delegate on error
}
// add thumbnail
UIImage *thumbnail = [self _thumbnailJPEGData:jpegData];
if (thumbnail) {
[photoDict setObject:thumbnail forKey:PBJVisionPhotoThumbnailKey];
} else {
DLog(@"failed to create a thumnbail");
// TODO: return delegate on error
}
} else {
DLog(@"failed to create jpeg still image data");
// TODO: return delegate on error
}
if ([_delegate respondsToSelector:@selector(vision:capturedPhoto:error:)]) {
[_delegate vision:self capturedPhoto:photoDict error:error];
}
// run a post shot focus
[self performSelector:@selector(_focus) withObject:nil afterDelay:0.5f];
}];