
I have two portrait-orientation videos. One was recorded with the default iPhone camera; the other was recorded inside my application using UIImagePickerController. The CIFilter is not applied properly to the second video.

If I apply the CIFilter to the first video, the filter is applied perfectly. But if I apply it to the second video, half of the frame goes blank, the visible part comes out blurred and stretched, and the result also has to be rotated on export. This code does not work for the second video:

AVAssetTrack *FirstAssetTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]; 

CIFilter *filter = [CIFilter filterWithName:@"CIPhotoEffectInstant"]; 

player.currentItem.videoComposition = [AVVideoComposition videoCompositionWithAsset: asset applyingCIFiltersWithHandler:^(AVAsynchronousCIImageFilteringRequest *request){ 
    // Clamp so the filter does not pick up transparent pixels at the image edges

    CIImage *source = [request.sourceImage imageByClampingToExtent]; 
    source = [source imageByApplyingTransform:FirstAssetTrack.preferredTransform]; 

    [filter setValue:source forKey:kCIInputImageKey]; 

    // Crop the filtered output to the bounds of the original image
    CIImage *output = [filter.outputImage imageByCroppingToRect:request.sourceImage.extent]; 

    // Provide the filter output to the composition 
    [request finishWithImage:output context:nil]; 
}]; 
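
Before filtering, it may help to see how the two recordings actually differ. A minimal diagnostic sketch (assuming asset is the already-loaded AVAsset) that logs the track geometry for each video:

#import <AVFoundation/AVFoundation.h>
#import <UIKit/UIKit.h>

AVAssetTrack *track = [[asset tracksWithMediaType:AVMediaTypeVideo] firstObject];
CGAffineTransform t = track.preferredTransform;

// naturalSize is the stored, un-rotated pixel size; preferredTransform
// tells the player how to rotate the frames for display.
NSLog(@"naturalSize: %@", NSStringFromCGSize(track.naturalSize));
NSLog(@"preferredTransform: a=%.1f b=%.1f c=%.1f d=%.1f tx=%.1f ty=%.1f",
      t.a, t.b, t.c, t.d, t.tx, t.ty);

A camera-app recording and a UIImagePickerController recording often report different combinations here, which would explain why the same handler behaves differently on the two assets.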

So this code is not right for the second video. I made a few changes to check the size and orientation, and after the orientation change it plays fine in AVPlayer, but I checked this link:

AVPlayer plays video composition result incorrectly

It deals with the same rotation-on-export problem we are all facing, so I changed my code according to that answer, but it is still not working. Please tell me what is wrong:

AVAssetTrack *FirstAssetTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]; 
CIFilter *filter = [CIFilter filterWithName:@"CIPhotoEffectInstant"]; 


UIImageOrientation FirstAssetOrientation_ = UIImageOrientationUp; 
BOOL isFirstAssetPortrait_ = NO; 
CGAffineTransform firstTransform = FirstAssetTrack.preferredTransform; 
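// Note: preferredTransform encodes the capture orientation as an affine
// matrix (a, b, c, d); the four comparisons below match the rotated-right,
// rotated-left, upright, and upside-down cases.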
if(firstTransform.a == 0 && firstTransform.b == 1.0 && firstTransform.c == -1.0 && firstTransform.d == 0) { 
    FirstAssetOrientation_= UIImageOrientationRight; 
    isFirstAssetPortrait_ = YES; 
} 
if(firstTransform.a == 0 && firstTransform.b == -1.0 && firstTransform.c == 1.0 && firstTransform.d == 0) { 
    FirstAssetOrientation_ = UIImageOrientationLeft; 
    isFirstAssetPortrait_ = YES; 
} 
if(firstTransform.a == 1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == 1.0) { 
    FirstAssetOrientation_ = UIImageOrientationUp; 
} 
if(firstTransform.a == -1.0 && firstTransform.b == 0 && firstTransform.c == 0 && firstTransform.d == -1.0) { 
    FirstAssetOrientation_ = UIImageOrientationDown; 
} 

player.currentItem.videoComposition = [AVVideoComposition videoCompositionWithAsset:asset applyingCIFiltersWithHandler:^(AVAsynchronousCIImageFilteringRequest * _Nonnull request) { 
    // Step 1: get the input frame image
    CIImage *sourceImage = request.sourceImage; 

    // Step 2: rotate the frame 
    CIFilter *transformFilter = [CIFilter filterWithName:@"CIAffineTransform"]; 
    [transformFilter setValue:sourceImage forKey: kCIInputImageKey]; 
    [transformFilter setValue: [NSValue valueWithCGAffineTransform: firstTransform] forKey: kCIInputTransformKey]; 
    sourceImage = transformFilter.outputImage; 
    CGRect extent = sourceImage.extent; 
    CGAffineTransform translation = CGAffineTransformMakeTranslation(-extent.origin.x, -extent.origin.y); 
    [transformFilter setValue:sourceImage forKey: kCIInputImageKey]; 
    [transformFilter setValue: [NSValue valueWithCGAffineTransform: translation] forKey: kCIInputTransformKey]; 
    sourceImage = transformFilter.outputImage; 

    // Step 3: apply the custom filter chosen by the user 
    extent = sourceImage.extent; 
    sourceImage = [sourceImage imageByClampingToExtent]; 
    [filter setValue:sourceImage forKey:kCIInputImageKey]; 
    sourceImage = filter.outputImage; 
    sourceImage = [sourceImage imageByCroppingToRect:extent]; 

    // make the frame the same aspect ratio as the original input frame 
    // by adding empty spaces at the top and the bottom of the extent rectangle 
    CGFloat newHeight = 1920 * 1920/extent.size.height; 
    CGFloat inset = (extent.size.height - newHeight)/2; 
    extent = CGRectInset(extent, 0, inset); 
    sourceImage = [sourceImage imageByCroppingToRect:extent]; 

    // scale down to the original frame size 
    CGFloat scale = 1920/newHeight; 
    CGAffineTransform scaleTransform = CGAffineTransformMakeScale(scale, scale*3.2); 
    [transformFilter setValue:sourceImage forKey: kCIInputImageKey]; 
    [transformFilter setValue: [NSValue valueWithCGAffineTransform: scaleTransform] forKey: kCIInputTransformKey]; 
    sourceImage = transformFilter.outputImage; 

    // translate the frame so its origin starts at (0, 0)
    CGAffineTransform translation1 = CGAffineTransformMake(1, 0, 0, 1, 0, 0); 
    [transformFilter setValue:sourceImage forKey: kCIInputImageKey]; 
    [transformFilter setValue: [NSValue valueWithCGAffineTransform: translation1] forKey: kCIInputTransformKey]; 
    sourceImage = transformFilter.outputImage; 

    // Step 4: finish processing the frame
    [request finishWithImage:sourceImage context:nil]; 

}]; 
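
The hard-coded 1920 values and the extra 3.2 scale factor are one likely source of the stretching, since they ignore the real track geometry. The upright display size can instead be derived from the track itself; a small sketch of that idea (variable names are mine):

CGSize natural = FirstAssetTrack.naturalSize;

// Apply the orientation transform to the natural bounds; for a portrait
// recording this swaps width and height, e.g. 1920x1080 becomes 1080x1920.
CGRect upright = CGRectApplyAffineTransform(CGRectMake(0, 0, natural.width, natural.height),
                                            FirstAssetTrack.preferredTransform);
CGSize renderSize = CGSizeMake(fabs(upright.size.width), fabs(upright.size.height));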

Answer

Remove the transform line that creates the problem, just this one:

source = [source imageByApplyingTransform:FirstAssetTrack.preferredTransform];

and check. :)

AVAssetTrack *FirstAssetTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]; 

CIFilter *filter = [CIFilter filterWithName:@"CIPhotoEffectInstant"]; 

player.currentItem.videoComposition = [AVVideoComposition videoCompositionWithAsset: asset applyingCIFiltersWithHandler:^(AVAsynchronousCIImageFilteringRequest *request){ 
    // Clamp so the filter does not pick up transparent pixels at the image edges

    CIImage *source = [request.sourceImage imageByClampingToExtent]; 

    [filter setValue:source forKey:kCIInputImageKey]; 

    // Crop the filtered output to the bounds of the original image
    CIImage *output = [filter.outputImage imageByCroppingToRect:request.sourceImage.extent]; 

    // Provide the filter output to the composition 
    [request finishWithImage:output context:nil]; 
}];
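
The same composition can be reused when exporting, so the exported file keeps the corrected orientation. A minimal sketch, assuming outputURL is a writable .mov file URL and that asset, filter, and player are the objects defined above:

AVAssetExportSession *export =
    [AVAssetExportSession exportSessionWithAsset:asset
                                      presetName:AVAssetExportPresetHighestQuality];

// Reuse the CIFilter-based composition for export as well as playback.
export.videoComposition = player.currentItem.videoComposition;
export.outputURL = outputURL;   // assumption: a writable file URL ending in .mov
export.outputFileType = AVFileTypeQuickTimeMovie;

[export exportAsynchronouslyWithCompletionHandler:^{
    if (export.status == AVAssetExportSessionStatusCompleted) {
        NSLog(@"export finished: %@", outputURL);
    } else {
        NSLog(@"export failed: %@", export.error);
    }
}];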