- (void) createWatermark:(UIImage*)image video:(NSURL*)videoURL 
{ 
    if (videoURL == nil) 
     return; 

    AppDelegate* appDelegate = [[UIApplication sharedApplication] delegate]; 
    //[appDelegate showLoadingView: YES]; 

    AVURLAsset* videoAsset = [[AVURLAsset alloc]initWithURL:videoURL options:nil]; 
    AVMutableComposition* mixComposition = [AVMutableComposition composition]; 

    AVMutableCompositionTrack* compositionVideoTrack = [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid]; 

    AVAssetTrack* clipVideoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]; 
    [compositionVideoTrack insertTimeRange:CMTimeRangeMake(kCMTimeZero, videoAsset.duration) 
            ofTrack:clipVideoTrack 
            atTime:kCMTimeZero error:nil]; 

    [compositionVideoTrack setPreferredTransform:[[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] preferredTransform]]; 

    // create the layer with the watermark image 
    CALayer* aLayer = [CALayer layer]; 
    aLayer.contents = (id)image.CGImage; 
    aLayer.frame = CGRectMake(50, 100, image.size.width, image.size.height); 
    aLayer.opacity = 0.9; 

    //sorts the layer in proper order 

    AVAssetTrack* videoTrack = [[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]; 
    CGSize videoSize = [videoTrack naturalSize]; 
    CALayer *parentLayer = [CALayer layer]; 
    CALayer *videoLayer = [CALayer layer]; 
    parentLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height); 
    videoLayer.frame = CGRectMake(0, 0, videoSize.width, videoSize.height); 
    [parentLayer addSublayer:videoLayer]; 
    [parentLayer addSublayer:aLayer]; 

    // create text Layer 
    CATextLayer* titleLayer = [CATextLayer layer]; 
    titleLayer.backgroundColor = [UIColor clearColor].CGColor; 
    titleLayer.string = @"Dummy text"; 
    titleLayer.font = CFBridgingRetain(@"Helvetica"); 
    titleLayer.fontSize = 28; 
    titleLayer.shadowOpacity = 0.5; 
    titleLayer.alignmentMode = kCAAlignmentCenter; 
    titleLayer.frame = CGRectMake(0, 50, videoSize.width, videoSize.height/6); 
    [parentLayer addSublayer:titleLayer]; 

    //create the composition and add the instructions to insert the layer: 

    AVMutableVideoComposition* videoComp = [AVMutableVideoComposition videoComposition]; 
    videoComp.renderSize = videoSize; 
    videoComp.frameDuration = CMTimeMake(1, 30); 
    videoComp.animationTool = [AVVideoCompositionCoreAnimationTool videoCompositionCoreAnimationToolWithPostProcessingAsVideoLayer:videoLayer inLayer:parentLayer]; 

    /// instruction 
    AVMutableVideoCompositionInstruction* instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction]; 

    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, [mixComposition duration]); 
    AVAssetTrack* mixVideoTrack = [[mixComposition tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]; 
    AVMutableVideoCompositionLayerInstruction* layerInstruction = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:mixVideoTrack]; 
    instruction.layerInstructions = [NSArray arrayWithObject:layerInstruction]; 
    videoComp.instructions = [NSArray arrayWithObject: instruction]; 

    // export video 

    _assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition presetName:AVAssetExportPresetMediumQuality]; 
    _assetExport.videoComposition = videoComp; 

    NSLog (@"created exporter. supportedFileTypes: %@", _assetExport.supportedFileTypes); 

    NSString* videoName = @"NewWatermarkedVideo.mov"; 

    NSString* exportPath = [NSTemporaryDirectory() stringByAppendingPathComponent:videoName]; 
    NSURL* exportUrl = [NSURL fileURLWithPath:exportPath]; 

    if ([[NSFileManager defaultManager] fileExistsAtPath:exportPath]) 
     [[NSFileManager defaultManager] removeItemAtPath:exportPath error:nil]; 

    _assetExport.outputFileType = AVFileTypeQuickTimeMovie; 
    _assetExport.outputURL = exportUrl; 
    _assetExport.shouldOptimizeForNetworkUse = YES; 

    [_assetExport exportAsynchronouslyWithCompletionHandler: 
    ^(void) { 

     //[appDelegate showLoadingView:NO]; 

     //Final code here 

     switch (_assetExport.status) 
     { 
      case AVAssetExportSessionStatusUnknown: 
       NSLog(@"Unknown"); 
       break; 
      case AVAssetExportSessionStatusWaiting: 
       NSLog(@"Waiting"); 
       break; 
      case AVAssetExportSessionStatusExporting: 
       NSLog(@"Exporting"); 
       break; 
      case AVAssetExportSessionStatusCompleted: 
       NSLog(@"Created new water mark image"); 
       playBtn.hidden = NO; 
       break; 
      case AVAssetExportSessionStatusFailed: 
       NSLog(@"Failed- %@", _assetExport.error); 
       break; 
      case AVAssetExportSessionStatusCancelled: 
       NSLog(@"Cancelled"); 
       break; 
     } 
    } 
    ]; 
} 
> Use of unresolved identifier 'presentMoviePlayerViewControllerAnimated' 
> 
> 'videoComposition()' is unavailable: use object construction 'AVMutableVideoComposition()' 

I am trying to add a watermark to a video. The code above works very well in Objective-C, but when I port it to Swift it shows many errors, such as "Enum case 'unknown' not found in type 'AVAssetExportSessionStatus?'" and "Argument labels '(_:)' do not match any available overloads". These are some of the errors that come up while doing it in Swift. I need help with the Swift version: I have it working in Objective-C, but not in Swift.
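For reference, the two compiler errors quoted above are typical of a straight Objective-C-to-Swift port; a minimal sketch of the usual Swift 3 fixes (the optional `assetExport` property here is an assumption for illustration):

    // 'videoComposition()' is unavailable: construct the object with its initializer.
    let videoComp = AVMutableVideoComposition()

    // "Enum case 'unknown' not found in type 'AVAssetExportSessionStatus?'" means the
    // export session (and therefore its status) is optional; unwrap it before switching.
    if let session = assetExport {            // assetExport: AVAssetExportSession? (assumed)
        switch session.status {
        case .unknown, .waiting, .exporting:
            print("still working")
        default:
            break
        }
    }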


All the code you posted is Objective-C, which you say works fine. To make the question easier to answer, please provide an example of the broken Swift code. – torinpitchers

Answer


You didn't provide an example of your Swift code, but you said you are getting errors in it. Here is a Swift 3.0 version of your method:

// Requires: import AVFoundation (and UIKit); lives in the same view controller as playBtn.
func createWatermark(_ image: UIImage, video videoURL: URL?) {
    guard let videoURL = videoURL else {
        return
    }

    let appDelegate = UIApplication.shared.delegate as? AppDelegate
    //[appDelegate showLoadingView: YES];

    let videoAsset = AVURLAsset(url: videoURL, options: nil)
    let mixComposition = AVMutableComposition()

    let compositionVideoTrack = mixComposition.addMutableTrack(withMediaType: AVMediaTypeVideo,
                                                               preferredTrackID: kCMPersistentTrackID_Invalid)
    let clipVideoTrack = videoAsset.tracks(withMediaType: AVMediaTypeVideo)[0]
    try? compositionVideoTrack.insertTimeRange(CMTimeRangeMake(kCMTimeZero, videoAsset.duration),
                                               of: clipVideoTrack,
                                               at: kCMTimeZero)
    compositionVideoTrack.preferredTransform = clipVideoTrack.preferredTransform

    // create the layer with the watermark image
    let aLayer = CALayer()
    aLayer.contents = image.cgImage
    aLayer.frame = CGRect(x: 50, y: 100, width: image.size.width, height: image.size.height)
    aLayer.opacity = 0.9

    // sort the layers in the proper order
    let videoSize = clipVideoTrack.naturalSize
    let parentLayer = CALayer()
    let videoLayer = CALayer()
    parentLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
    videoLayer.frame = CGRect(x: 0, y: 0, width: videoSize.width, height: videoSize.height)
    parentLayer.addSublayer(videoLayer)
    parentLayer.addSublayer(aLayer)

    // create the text layer
    let titleLayer = CATextLayer()
    titleLayer.backgroundColor = UIColor.clear.cgColor
    titleLayer.string = "Dummy text"
    titleLayer.font = "Helvetica" as CFString
    titleLayer.fontSize = 28
    titleLayer.shadowOpacity = 0.5
    titleLayer.alignmentMode = kCAAlignmentCenter
    titleLayer.frame = CGRect(x: 0, y: 50, width: videoSize.width, height: videoSize.height / 6)
    parentLayer.addSublayer(titleLayer)

    // create the composition and add the instructions to insert the layers
    // ('videoComposition()' is unavailable in Swift: use the initializer instead)
    let videoComp = AVMutableVideoComposition()
    videoComp.renderSize = videoSize
    videoComp.frameDuration = CMTimeMake(1, 30)
    videoComp.animationTool = AVVideoCompositionCoreAnimationTool(postProcessingAsVideoLayer: videoLayer,
                                                                  in: parentLayer)

    /// instruction
    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRangeMake(kCMTimeZero, mixComposition.duration)
    let mixVideoTrack = mixComposition.tracks(withMediaType: AVMediaTypeVideo)[0]
    let layerInstruction = AVMutableVideoCompositionLayerInstruction(assetTrack: mixVideoTrack)
    instruction.layerInstructions = [layerInstruction]
    videoComp.instructions = [instruction]

    // export the video; the initializer is failable, so unwrap it here.
    // Keeping the session non-optional is what makes `switch assetExport.status` compile
    // (switching on an optional status is what produced the
    // "Enum case 'unknown' not found in type 'AVAssetExportSessionStatus?'" error).
    guard let assetExport = AVAssetExportSession(asset: mixComposition,
                                                 presetName: AVAssetExportPresetMediumQuality) else {
        return
    }
    assetExport.videoComposition = videoComp
    print("created exporter. supportedFileTypes: \(assetExport.supportedFileTypes)")

    let videoName = "NewWatermarkedVideo.mov"
    let exportPath = (NSTemporaryDirectory() as NSString).appendingPathComponent(videoName)
    let exportUrl = URL(fileURLWithPath: exportPath)
    if FileManager.default.fileExists(atPath: exportPath) {
        try? FileManager.default.removeItem(atPath: exportPath)
    }

    assetExport.outputFileType = AVFileTypeQuickTimeMovie
    assetExport.outputURL = exportUrl
    assetExport.shouldOptimizeForNetworkUse = true

    assetExport.exportAsynchronously {
        //[appDelegate showLoadingView:NO];
        //Final code here
        switch assetExport.status {
        case .unknown:
            print("Unknown")
        case .waiting:
            print("Waiting")
        case .exporting:
            print("Exporting")
        case .completed:
            print("Created new water mark video")
            // the completion handler is not called on the main queue,
            // so hop back before touching UIKit
            DispatchQueue.main.async {
                self.playBtn.isHidden = false
            }
        case .failed:
            print("Failed - \(String(describing: assetExport.error))")
        case .cancelled:
            print("Cancelled")
        }
    }
}
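The question's remaining error, "Use of unresolved identifier 'presentMoviePlayerViewControllerAnimated'", is separate from the watermarking code: that helper belongs to the MPMoviePlayerViewController API, which was deprecated in iOS 9. A minimal sketch of previewing the exported file with AVPlayerViewController instead (the call site, `watermarkImage`, and `sourceVideoURL` are placeholders, not names from the question):

    import AVKit
    import AVFoundation

    // Hypothetical call site inside the same view controller.
    createWatermark(watermarkImage, video: sourceVideoURL)

    // Once the export reports .completed (e.g. when the play button is tapped),
    // present the file with AVPlayerViewController, the replacement for the
    // deprecated presentMoviePlayerViewControllerAnimated: API.
    let exportedURL = URL(fileURLWithPath: NSTemporaryDirectory())
        .appendingPathComponent("NewWatermarkedVideo.mov")
    let playerVC = AVPlayerViewController()
    playerVC.player = AVPlayer(url: exportedURL)
    present(playerVC, animated: true) {
        playerVC.player?.play()
    }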