
iOS: How to correctly handle orientation when capturing video using AVAssetWriter

I'm building a sample application that records video using AVFoundation. The whole point is to have more control over how the video is recorded. In my sample project I have video capture working, but I'm struggling to handle orientation correctly.

I've done a lot of searching on the web and found that others say you should not allow your capture view or capture session to rotate with the device, but instead set a transform so the video is rotated during playback. That works fine on iOS and Mac devices, but I'm wondering whether it causes problems on other platforms such as Windows or Android.
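
In other words, the recommendation boils down to leaving the session in its native orientation and only tagging the writer input, roughly like this (a minimal sketch; videoInput matches the property in the code below):

    // Keep encoding in the sensor's native landscape orientation; the player
    // applies this rotation at playback time (90° shown here for portrait).
    videoInput?.transform = CGAffineTransform(rotationAngle: CGFloat(90.0 * M_PI/180.0))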

Also, when I look at the metadata of the recorded video, the width and height are not set correctly for the orientation. That makes sense, since the transform only changes the presentation of the video, not its actual resolution.
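
This is easy to confirm by inspecting the recorded file, for example (a sketch using the same Swift 3-era AVFoundation API as the code below, with outputUrl as the recording path):

    let asset = AVURLAsset(url: outputUrl)
    if let track = asset.tracks(withMediaType: AVMediaTypeVideo).first {
     // naturalSize stays at the sensor resolution (e.g. 640x480) in every
     // orientation; only preferredTransform changes how players present it.
     print("naturalSize: \(track.naturalSize), preferredTransform: \(track.preferredTransform)")
    }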

So my question is: how do I correctly support portrait and landscape orientations and have them reflected correctly in the video file output? These videos need to play back correctly on every platform, so I think the resolution matters a great deal.

Below is the full source I have so far. I'd appreciate any advice you can offer.

import UIKit 
import AVFoundation 

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate { 

    //MARK: - Outlet 

    @IBOutlet weak var previewView: UIView! 
    @IBOutlet var playStopButton: UIButton! 

    //MARK: - Private Variables 

    // NOTE: AVCaptureVideoDataOutput/AVCaptureAudioDataOutput expect a serial
    // delegate queue; a concurrent queue can deliver sample buffers out of order.
    let videoQueue = DispatchQueue(label: "VideoQueue", qos: .background, attributes: .concurrent, autoreleaseFrequency: .workItem, target: nil) 
    private let captureSession = AVCaptureSession() 

    var outputUrl: URL { 
     get { 

      if let url = _outputUrl { 
       return url 
      } 


      _outputUrl = outputDirectory.appendingPathComponent("video.mp4") 
      return _outputUrl! 
     } 
    } 

    private var _outputUrl: URL? 

    var outputDirectory: URL { 
     get { 

      if let url = _outputDirectory { 
       return url 
      } 


      _outputDirectory = getDocumentsDirectory().appendingPathComponent("recording") 
      return _outputDirectory! 
     } 
    } 

    private var _outputDirectory: URL? 

    private var assetWriter: AVAssetWriter? 
    private var videoInput: AVAssetWriterInput? 
    private var audioInput: AVAssetWriterInput? 
    private var videoOutput: AVCaptureVideoDataOutput? 
    private var audioOutput: AVCaptureAudioDataOutput? 

    private var isRecording = false 
    private var isWriting = false 

    private var videoSize = CGSize(width: 640, height: 480) 

    //MARK: - View Life-cycle 

    override func viewDidLoad() { 
     super.viewDidLoad() 


     videoQueue.async { 

      do { 

       try self.configureCaptureSession() 
       try self.configureAssetWriter() 

       DispatchQueue.main.async { 
        self.configurePreview() 
       } 

      } catch { 

       DispatchQueue.main.async { 
        self.showAlert("Unable to configure video output") 
       } 
      } 
     } 
    } 

    override var preferredInterfaceOrientationForPresentation: UIInterfaceOrientation { 
     return .portrait 
    } 

    override var supportedInterfaceOrientations: UIInterfaceOrientationMask { 
     return .portrait 
    } 

    //MARK: - Capture Session 

    private func configureCaptureSession() throws { 

     do { 

      // configure the session 
      if captureSession.canSetSessionPreset(AVCaptureSessionPreset640x480) { 
       captureSession.sessionPreset = AVCaptureSessionPreset640x480 
      } 

      // configure capture devices 
      let camDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo) 
      let micDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio) 

      let camInput = try AVCaptureDeviceInput(device: camDevice) 
      let micInput = try AVCaptureDeviceInput(device: micDevice) 

      if captureSession.canAddInput(camInput) { 
       captureSession.addInput(camInput) 
      } 

      if captureSession.canAddInput(micInput) { 
       captureSession.addInput(micInput) 
      } 

      // configure audio/video output 
      videoOutput = AVCaptureVideoDataOutput() 
      videoOutput?.alwaysDiscardsLateVideoFrames = false // TODO: is this necessary? 
      videoOutput?.setSampleBufferDelegate(self, queue: videoQueue) 

      if let v = videoOutput { 
       captureSession.addOutput(v) 
      } 

      audioOutput = AVCaptureAudioDataOutput() 
      audioOutput?.setSampleBufferDelegate(self, queue: videoQueue) 

      if let a = audioOutput { 
       captureSession.addOutput(a) 
      } 

      // configure audio session 
      let audioSession = AVAudioSession.sharedInstance() 
      try audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord) 
      try audioSession.setActive(true) 

      var micPort: AVAudioSessionPortDescription? 

      if let inputs = audioSession.availableInputs { 
       for port in inputs { 
        if port.portType == AVAudioSessionPortBuiltInMic { 
         micPort = port 
         break; 
        } 
       } 
      } 

      if let port = micPort, let dataSources = port.dataSources { 

       for source in dataSources { 
        if source.orientation == AVAudioSessionOrientationFront { 
         try audioSession.setPreferredInput(port) 
         break 
        } 
       } 
      } 

     } catch { 
      print("Failed to configure audio/video capture session") 
      throw error 
     } 
    } 

    private func configureAssetWriter() throws { 

     prepareVideoFile() 

     do { 

      assetWriter = try AVAssetWriter(url: outputUrl, fileType: AVFileTypeMPEG4) 

      guard let writer = assetWriter else { 
       print("Asset writer not created") 
       return 
      } 

      let videoSettings: [String: Any] = [AVVideoCodecKey: AVVideoCodecH264, 
                                          AVVideoWidthKey: NSNumber(value: Float(videoSize.width)), 
                                          AVVideoHeightKey: NSNumber(value: Float(videoSize.height))] 

      videoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings) 
      videoInput?.expectsMediaDataInRealTime = true 
      videoInput?.transform = getVideoTransform() 

      var channelLayout = AudioChannelLayout() 
      memset(&channelLayout, 0, MemoryLayout<AudioChannelLayout>.size) 
      channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo 

      // NOTE: channelLayout is built but never used; to apply it, add it to
      // audioSettings under AVChannelLayoutKey.
      let audioSettings: [String: Any] = [AVFormatIDKey: kAudioFormatMPEG4AAC, 
                                          AVSampleRateKey: 44100, 
                                          AVNumberOfChannelsKey: 2] 

      audioInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: audioSettings) 
      audioInput?.expectsMediaDataInRealTime = true 

      guard let vi = videoInput else { 
       print("Video input not configured") 
       return 
      } 

      guard let ai = audioInput else { 
       print("Audio input not configured") 
       return 
      } 

      if writer.canAdd(vi) { 
       writer.add(vi) 
      } 

      if writer.canAdd(ai) { 
       writer.add(ai) 
      } 

     } catch { 
      print("Failed to configure asset writer") 
      throw error 
     } 
    } 

    private func prepareVideoFile() { 

     if FileManager.default.fileExists(atPath: outputUrl.path) { 

      do { 
       try FileManager.default.removeItem(at: outputUrl) 
      } catch { 
       print("Unable to remove file at URL \(outputUrl)") 
      } 
     } 

     if !FileManager.default.fileExists(atPath: outputDirectory.path) { 

      do { 
       try FileManager.default.createDirectory(at: outputDirectory, withIntermediateDirectories: true, attributes: nil) 
      } catch { 
       print("Unable to create directory at URL \(outputDirectory)") 
      } 
     } 
    } 

    private func configurePreview() { 

     if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) { 
      previewLayer.frame = previewView.bounds 
      previewView.layer.addSublayer(previewLayer) 
     } 
    } 

    private func getVideoSize() -> CGSize { 

     if UIDevice.current.orientation == .landscapeLeft || UIDevice.current.orientation == .landscapeRight { 

      if videoSize.width > videoSize.height { 
       return videoSize 
      } else { 
       return CGSize(width: videoSize.height, height: videoSize.width) 
      } 

     } else { 

      if videoSize.width < videoSize.height { 
       return videoSize 
      } else { 
       return CGSize(width: videoSize.height, height: videoSize.width) 
      } 
     } 
    } 

    private func getVideoTransform() -> CGAffineTransform { 

     switch UIDevice.current.orientation { 

     case .portraitUpsideDown: 
      return CGAffineTransform(rotationAngle: CGFloat(-90.0 * M_PI/180.0)) 

     case .landscapeLeft: 
      // Back camera; TODO: the front facing camera would need 0° here instead. 
      return CGAffineTransform(rotationAngle: CGFloat(-180.0 * M_PI/180.0)) 

     case .landscapeRight: 
      // Back camera; TODO: the front facing camera would need -180° here instead. 
      return CGAffineTransform(rotationAngle: CGFloat(0.0)) 

     default: 
      // .portrait (and face up/down) map to a 90° rotation. 
      return CGAffineTransform(rotationAngle: CGFloat(90.0 * M_PI/180.0)) 
     } 
    } 

    //MARK: - Controls 

    private func startRecording() { 

     videoQueue.async { 
      self.captureSession.startRunning() 
     } 

     isRecording = true 
     playStopButton.setTitle("Stop Recording", for: .normal) 
     print("Recording did start") 
    } 

    private func stopRecording() { 

     if !isRecording { 
      return 
     } 

     videoQueue.async { 

      self.assetWriter?.finishWriting { 
       print("Asset writer did finish writing") 
       self.isWriting = false 
      } 

      self.captureSession.stopRunning() 
     } 

     isRecording = false 

     playStopButton.setTitle("Start Recording", for: .normal) 
     print("Recording did stop") 
    } 

    //MARK: - AVCaptureVideoDataOutputSampleBufferDelegate 

    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) { 

     guard let w = assetWriter else { 
      print("Asset writer not configured") 
      return 
     } 

     guard let vo = videoOutput else { 
      print("Video output not configured") 
      return 
     } 

     guard let ao = audioOutput else { 
      print("Audio output not configured") 
      return 
     } 

     guard let vi = videoInput else { 
      print("Video input not configured") 
      return 
     } 

     guard let ai = audioInput else { 
      print("Audio input not configured") 
      return 
     } 

     let st = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) 

     print("Writer status \(w.status.rawValue)") 

     if let e = w.error { 
      print("Writer error \(e)") 
      stopRecording() 
      return 
     } 

     switch w.status { 

     case .unknown: 

      if !isWriting { 
       isWriting = true 
       w.startWriting() 
       w.startSession(atSourceTime: st) 
      } 

      return 

     case .completed: 
      print("Video writing completed") 
      return 

     case .cancelled: 
      print("Video writing cancelled") 
      return 

     case .failed: 
      print("Video writing failed") 
      return 

     default: 
      print("Video is writing") 
     } 

     if vo == captureOutput { 

      if !vi.append(sampleBuffer) { 
       print("Unable to write to video buffer") 
      } 

     } else if ao == captureOutput { 

      if !ai.append(sampleBuffer) { 
       print("Unable to write to audio buffer") 
      } 
     } 
    } 

    //MARK: Helpers 

    private func getDocumentsDirectory() -> URL { 
     let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask) 
     let documentsDirectory = paths[0] 
     return documentsDirectory 
    } 

    //MARK: Actions 

    @IBAction func startStopTapped(sender: AnyObject) { 

     if isRecording { 
      stopRecording() 
     } else { 
      startRecording() 
     } 
    } 
} 

Does anyone have any information on this? Has nobody tried it? –


If you find a way, let Apple know; they seem to get rotation wrong from time to time :) – Mindaugas

Answer


I found a solution to my problem. The solution is to export the video using AVAssetExportSession, and to handle the video size and rotation at export time rather than while recording. I still ran into an issue where I had to fix the scale factor when going from the original video size down to the smaller 640x480 resolution, but at least my rotation problems were solved. See the updated code below.

import UIKit 
import AVFoundation 

class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate { 

    //MARK: - Outlet 

    @IBOutlet weak var previewView: UIView! 
    @IBOutlet var playStopButton: UIButton! 

    //MARK: - Private Variables 

    // NOTE: as above, the data outputs expect a serial delegate queue.
    let videoQueue = DispatchQueue(label: "VideoQueue", qos: .background, attributes: .concurrent, autoreleaseFrequency: .workItem, target: nil) 
    private let captureSession = AVCaptureSession() 

    var outputUrl: URL { 
     get { 

      if let url = _outputUrl { 
       return url 
      } 

      _outputUrl = outputDirectory.appendingPathComponent("video.mp4") 
      return _outputUrl! 
     } 
    } 

    private var _outputUrl: URL? 

    var exportUrl: URL { 
     get { 

      if let url = _exportUrl { 
       return url 
      } 

      _exportUrl = outputDirectory.appendingPathComponent("video_encoded.mp4") 
      return _exportUrl! 
     } 
    } 

    private var _exportUrl: URL? 

    var outputDirectory: URL { 
     get { 

      if let url = _outputDirectory { 
       return url 
      } 

      _outputDirectory = getDocumentsDirectory().appendingPathComponent("recording") 
      return _outputDirectory! 
     } 
    } 

    private var _outputDirectory: URL? 

    private var assetWriter: AVAssetWriter? 
    private var videoInput: AVAssetWriterInput? 
    private var audioInput: AVAssetWriterInput? 
    private var videoOutput: AVCaptureVideoDataOutput? 
    private var audioOutput: AVCaptureAudioDataOutput? 

    private var isRecording = false 
    private var isWriting = false 

    private var videoSize = CGSize(width: 640, height: 480) 
    private var exportPreset = AVAssetExportPreset640x480 

    //MARK: - View Life-cycle 

    override func viewDidLoad() { 
     super.viewDidLoad() 

     videoQueue.async { 

      do { 

       try self.configureCaptureSession() 

       DispatchQueue.main.sync { 
        self.configurePreview() 
       } 

      } catch { 

       DispatchQueue.main.async { 
        self.showAlert("Unable to configure capture session") 
       } 
      } 
     } 
    } 

    override var preferredInterfaceOrientationForPresentation: UIInterfaceOrientation { 
     return .portrait 
    } 

    override var supportedInterfaceOrientations: UIInterfaceOrientationMask { 
     return .portrait 
    } 

    //MARK: - Capture Session 

    private func configureCaptureSession() throws { 

     do { 

      // configure capture devices 
      let camDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeVideo) 
      let micDevice = AVCaptureDevice.defaultDevice(withMediaType: AVMediaTypeAudio) 

      let camInput = try AVCaptureDeviceInput(device: camDevice) 
      let micInput = try AVCaptureDeviceInput(device: micDevice) 

      if captureSession.canAddInput(camInput) { 
       captureSession.addInput(camInput) 
      } 

      if captureSession.canAddInput(micInput) { 
       captureSession.addInput(micInput) 
      } 

      // configure audio/video output 
      videoOutput = AVCaptureVideoDataOutput() 
      videoOutput?.alwaysDiscardsLateVideoFrames = false // TODO: is this necessary? 
      videoOutput?.setSampleBufferDelegate(self, queue: videoQueue) 

      if let v = videoOutput { 
       captureSession.addOutput(v) 
      } 

      audioOutput = AVCaptureAudioDataOutput() 
      audioOutput?.setSampleBufferDelegate(self, queue: videoQueue) 

      if let a = audioOutput { 
       captureSession.addOutput(a) 
      } 

      // configure audio session 
      let audioSession = AVAudioSession.sharedInstance() 
      try audioSession.setCategory(AVAudioSessionCategoryPlayAndRecord) 
      try audioSession.setActive(true) 

      var micPort: AVAudioSessionPortDescription? 

      if let inputs = audioSession.availableInputs { 
       for port in inputs { 
        if port.portType == AVAudioSessionPortBuiltInMic { 
         micPort = port 
         break; 
        } 
       } 
      } 

      if let port = micPort, let dataSources = port.dataSources { 

       for source in dataSources { 
        if source.orientation == AVAudioSessionOrientationFront { 
         try audioSession.setPreferredInput(port) 
         break 
        } 
       } 
      } 

     } catch { 
      print("Failed to configure audio/video capture session") 
      throw error 
     } 
    } 

    private func configureAssetWriter() throws { 

     prepareVideoFile() 

     do { 

      if assetWriter != nil { 
       assetWriter = nil 
       videoInput = nil 
       audioInput = nil 
      } 

      assetWriter = try AVAssetWriter(url: outputUrl, fileType: AVFileTypeMPEG4) 

      guard let writer = assetWriter else { 
       print("Asset writer not created") 
       return 
      } 

      let videoSettings: [String: Any] = [AVVideoCodecKey: AVVideoCodecH264, 
           AVVideoWidthKey: NSNumber(value: Float(videoSize.width)), 
           AVVideoHeightKey: NSNumber(value: Float(videoSize.height))] 

      videoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings) 
      videoInput?.expectsMediaDataInRealTime = true 

      var channelLayout = AudioChannelLayout() 
      memset(&channelLayout, 0, MemoryLayout<AudioChannelLayout>.size) 
      channelLayout.mChannelLayoutTag = kAudioChannelLayoutTag_Stereo 

      // NOTE: channelLayout is built but never used; to apply it, add it to
      // audioSettings under AVChannelLayoutKey.
      let audioSettings: [String: Any] = [AVFormatIDKey: kAudioFormatMPEG4AAC, 
                                          AVSampleRateKey: 44100, 
                                          AVNumberOfChannelsKey: 2] 

      audioInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: audioSettings) 
      audioInput?.expectsMediaDataInRealTime = true 

      guard let vi = videoInput else { 
       print("Video input not configured") 
       return 
      } 

      guard let ai = audioInput else { 
       print("Audio input not configured") 
       return 
      } 

      if writer.canAdd(vi) { 
       writer.add(vi) 
      } 

      if writer.canAdd(ai) { 
       writer.add(ai) 
      } 

     } catch { 
      print("Failed to configure asset writer") 
      throw error 
     } 
    } 

    private func prepareVideoFile() { 

     if FileManager.default.fileExists(atPath: outputUrl.path) { 

      do { 
       try FileManager.default.removeItem(at: outputUrl) 
      } catch { 
       print("Unable to remove file at URL \(outputUrl)") 
      } 
     } 

     if !FileManager.default.fileExists(atPath: outputDirectory.path) { 

      do { 
       try FileManager.default.createDirectory(at: outputDirectory, withIntermediateDirectories: true, attributes: nil) 
      } catch { 
       print("Unable to create directory at URL \(outputDirectory)") 
      } 
     } 
    } 

    private func configurePreview() { 

     if let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession) { 
      previewLayer.frame = previewView.bounds 
      previewView.layer.addSublayer(previewLayer) 
     } 
    } 

    private func getVideoSize() -> CGSize { 

     if UIDevice.current.orientation == .landscapeLeft || UIDevice.current.orientation == .landscapeRight { 

      if videoSize.width > videoSize.height { 
       return videoSize 
      } else { 
       return CGSize(width: videoSize.height, height: videoSize.width) 
      } 

     } else { 

      if videoSize.width < videoSize.height { 
       return videoSize 
      } else { 
       return CGSize(width: videoSize.height, height: videoSize.width) 
      } 
     } 
    } 

    //MARK: - Controls 

    private func startRecording() { 

     videoQueue.async { 

      do { 
       try self.configureAssetWriter() 
       self.captureSession.startRunning() 

      } catch { 
       print("Unable to start recording") 
       DispatchQueue.main.async { self.showAlert("Unable to start recording") } 
      } 
     } 

     isRecording = true 
     playStopButton.setTitle("Stop Recording", for: .normal) 
     print("Recording did start") 
    } 

    private func stopRecording() { 

     if !isRecording { 
      return 
     } 

     videoQueue.async { 

      self.assetWriter?.finishWriting { 
       print("Asset writer did finish writing") 
       self.isWriting = false 
      } 

      self.captureSession.stopRunning() 

      do { 
       try self.export() 
      } catch { 
       print("Export failed") 
       DispatchQueue.main.async { self.showAlert("Unable to export video") } 
      } 
     } 

     isRecording = false 

     playStopButton.setTitle("Start Recording", for: .normal) 
     print("Recording did stop") 
    } 

    //MARK: - AVCaptureVideoDataOutputSampleBufferDelegate 

    func captureOutput(_ captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, from connection: AVCaptureConnection!) { 

     guard let w = assetWriter else { 
      print("Asset writer not configured") 
      return 
     } 

     guard let vo = videoOutput else { 
      print("Video output not configured") 
      return 
     } 

     guard let ao = audioOutput else { 
      print("Audio output not configured") 
      return 
     } 

     guard let vi = videoInput else { 
      print("Video input not configured") 
      return 
     } 

     guard let ai = audioInput else { 
      print("Audio input not configured") 
      return 
     } 

     let st = CMSampleBufferGetPresentationTimeStamp(sampleBuffer) 

     print("Writer status \(w.status.rawValue)") 

     if let e = w.error { 
      print("Writer error \(e)") 
      stopRecording() 
      return 
     } 

     switch w.status { 

     case .unknown: 

      if !isWriting { 
       isWriting = true 
       w.startWriting() 
       w.startSession(atSourceTime: st) 
      } 

      return 

     case .completed: 
      print("Video writing completed") 
      return 

     case .cancelled: 
      print("Video writing cancelled") 
      return 

     case .failed: 
      print("Video writing failed") 
      return 

     default: 
      print("Video is writing") 
     } 

     if vo == captureOutput { 

      if !vi.append(sampleBuffer) { 
       print("Unable to write to video buffer") 
      } 

     } else if ao == captureOutput { 

      if !ai.append(sampleBuffer) { 
       print("Unable to write to audio buffer") 
      } 
     } 
    } 

    //MARK: - Export 

    private func getVideoComposition(asset: AVAsset, videoSize: CGSize) -> AVMutableVideoComposition? { 

     guard let videoTrack = asset.tracks(withMediaType: AVMediaTypeVideo).first else { 
      print("Unable to get video tracks") 
      return nil 
     } 

     let videoComposition = AVMutableVideoComposition() 
     videoComposition.renderSize = videoSize 

     // One frame at the source track's nominal frame rate.
     let seconds = Float64(1.0/videoTrack.nominalFrameRate) 
     videoComposition.frameDuration = CMTimeMakeWithSeconds(seconds, 600) 

     let layerInst = AVMutableVideoCompositionLayerInstruction(assetTrack: videoTrack) 

     // NOTE: orientation metadata normally lives on the video track
     // (videoTrack.preferredTransform); asset.preferredTransform is usually identity.
     var transforms = asset.preferredTransform 

     var isPortrait = true 

     // A ±90° rotation matrix (a == 0, |b| == 1, |c| == 1, d == 0) means the
     // file is already tagged with a rotation, so no extra fix-up is applied.
     if (transforms.a == 0.0 && transforms.b == 1.0 && transforms.c == -1.0 && transforms.d == 0.0) || 
        (transforms.a == 0.0 && transforms.b == -1.0 && transforms.c == 1.0 && transforms.d == 0.0) { 
      isPortrait = false 
     } 

     if isPortrait { 
      // Rotate the landscape frames 90° and translate them back into the render rect. 
      transforms = transforms.concatenating(CGAffineTransform(rotationAngle: CGFloat(90.0.degreesToRadians))) 
      transforms = transforms.concatenating(CGAffineTransform(translationX: videoSize.width, y: 0)) 
     } 

     layerInst.setTransform(transforms, at: kCMTimeZero) 

     let inst = AVMutableVideoCompositionInstruction() 
     inst.backgroundColor = UIColor.black.cgColor 
     inst.layerInstructions = [layerInst] 
     inst.timeRange = CMTimeRange(start: kCMTimeZero, duration: asset.duration) 

     videoComposition.instructions = [inst] 

     return videoComposition 

    } 

    private func export() throws { 

     let videoAsset = AVURLAsset(url: outputUrl) 

     if FileManager.default.fileExists(atPath: exportUrl.path) { 
      try FileManager.default.removeItem(at: exportUrl) 
     } 

     let videoSize = getVideoSize() 

     guard let encoder = AVAssetExportSession(asset: videoAsset, presetName: exportPreset) else { 
      print("Unable to create encoder") 
      return 
     } 

     guard let vidcomp = getVideoComposition(asset: videoAsset, videoSize: videoSize) else { 
      print("Unable to create video composition") 
      return 
     } 

     encoder.videoComposition = vidcomp 
     encoder.outputFileType = AVFileTypeMPEG4 // MP4 format 
     encoder.outputURL = exportUrl 
     encoder.shouldOptimizeForNetworkUse = true 

     encoder.exportAsynchronously { 
      // The completion handler also fires on failure, so check the status. 
      if encoder.status == .completed { 
       print("Video exported successfully") 
      } else { 
       print("Export finished with status \(encoder.status.rawValue), error: \(String(describing: encoder.error))") 
      } 
     } 
    } 

    //MARK: Helpers 

    private func getDocumentsDirectory() -> URL { 
     let paths = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask) 
     let documentsDirectory = paths[0] 
     return documentsDirectory 
    } 

    //MARK: Actions 

    @IBAction func startStopTapped(sender: AnyObject) { 

     if isRecording { 
      stopRecording() 
     } else { 
      startRecording() 
     } 
    } 
}
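
Note: the listings above reference two helpers that are not included in the post: the degreesToRadians conversion used in getVideoComposition(asset:videoSize:) and the showAlert(_:) error reporter. Minimal sketches of assumed implementations:

    extension FloatingPoint {
     // Assumed degrees-to-radians helper (not part of the original post).
     var degreesToRadians: Self { return self * .pi / 180 }
    }

    extension ViewController {
     // Assumed bare-bones alert helper (not part of the original post).
     func showAlert(_ message: String) {
      let alert = UIAlertController(title: nil, message: message, preferredStyle: .alert)
      alert.addAction(UIAlertAction(title: "OK", style: .default, handler: nil))
      present(alert, animated: true, completion: nil)
     }
    }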