2014-03-12 3 views
3

비디오 브로드 캐스트에 FFmpegFrameRecorder을 사용하고 있습니다. 오디오가 비디오 프레임보다 빠릅니다. 다음 코드를 사용하고 있지만 완전한 비디오를 생성 할 수 없습니다. 오디오 비디오에 문제가 있습니다. 타임 스탬프.FFmpegFrameRecorder video 브로드 캐스팅 오디오가 3G 네트워크의 비디오 프레임보다 빠릅니다.

자바 코드 :

import static com.googlecode.javacv.cpp.opencv_core.IPL_DEPTH_8U; 

import java.io.IOException; 
import java.nio.ShortBuffer; 

import android.app.Activity; 
import android.content.Context; 
import android.content.pm.ActivityInfo; 
import android.graphics.Bitmap; 
import android.graphics.Canvas; 
import android.hardware.Camera; 
import android.hardware.Camera.PreviewCallback; 
import android.media.AudioFormat; 
import android.media.AudioRecord; 
import android.media.MediaRecorder; 
import android.os.Bundle; 
import android.os.PowerManager; 
import android.util.Log; 
import android.view.KeyEvent; 
import android.view.SurfaceHolder; 
import android.view.SurfaceView; 
import android.view.View; 
import android.view.View.OnClickListener; 
import android.view.ViewGroup.LayoutParams; 
import android.widget.Button; 
import android.widget.LinearLayout; 

import com.googlecode.javacv.FFmpegFrameRecorder; 
import com.googlecode.javacv.cpp.opencv_core.IplImage; 

public class MainActivity extends Activity implements OnClickListener { 

    private final static String LOG_TAG = "MainActivity"; 

    private PowerManager.WakeLock mWakeLock; 

    private String ffmpeg_link = ""; 

    private volatile FFmpegFrameRecorder recorder; 
    boolean recording = false; 
    long startTime = 0; 

    private int sampleAudioRateInHz = 16000; 
    private int imageWidth = 320; 
    private int imageHeight = 240; 
    private int frameRate = 24; 

    private Thread audioThread; 
    volatile boolean runAudioThread = true; 
    private AudioRecord audioRecord; 
    private AudioRecordRunnable audioRecordRunnable; 

    private CameraView cameraView; 
    private IplImage yuvIplimage = null; 

    private Button recordButton; 
    private LinearLayout mainLayout; 

    @Override 
    public void onCreate(Bundle savedInstanceState) { 
     super.onCreate(savedInstanceState); 

     setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE); 
     setContentView(R.layout.activity_main); 

     initLayout(); 
     initRecorder(); 
    } 

    @Override 
    protected void onResume() { 
     super.onResume(); 

     if (mWakeLock == null) { 
      PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE); 
      mWakeLock = pm.newWakeLock(PowerManager.SCREEN_BRIGHT_WAKE_LOCK, 
        LOG_TAG); 
      mWakeLock.acquire(); 
     } 
    } 

    @Override 
    protected void onPause() { 
     super.onPause(); 

     if (mWakeLock != null) { 
      mWakeLock.release(); 
      mWakeLock = null; 
     } 
    } 

    @Override 
    protected void onDestroy() { 
     super.onDestroy(); 

     recording = false; 
    } 

    private void initLayout() { 

     mainLayout = (LinearLayout) this.findViewById(R.id.record_layout); 

     recordButton = (Button) findViewById(R.id.recorder_control); 
     recordButton.setText("Start"); 
     recordButton.setOnClickListener(this); 

     cameraView = new CameraView(this); 

     LinearLayout.LayoutParams layoutParam = new LinearLayout.LayoutParams(
       LayoutParams.MATCH_PARENT, LayoutParams.MATCH_PARENT); 
     mainLayout.addView(cameraView, layoutParam); 
     Log.v(LOG_TAG, "added cameraView to mainLayout"); 
    } 

    private void initRecorder() { 
     Log.w(LOG_TAG, "initRecorder"); 

     if (yuvIplimage == null) { 
      // Recreated after frame size is set in surface change method 
      yuvIplimage = IplImage.create(imageWidth, imageHeight, 
        IPL_DEPTH_8U, 2); 
      // yuvIplimage = IplImage.create(imageWidth, imageHeight, 
      // IPL_DEPTH_32S, 2); 

      Log.v(LOG_TAG, "IplImage.create"); 
     } 

     recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, 
       imageHeight, 1); 
     Log.v(LOG_TAG, "FFmpegFrameRecorder: " + ffmpeg_link + " imageWidth: " 
       + imageWidth + " imageHeight " + imageHeight); 

     recorder.setFormat("flv"); 
     Log.v(LOG_TAG, "recorder.setFormat(\"flv\")"); 

     recorder.setSampleRate(sampleAudioRateInHz); 
     Log.v(LOG_TAG, "recorder.setSampleRate(sampleAudioRateInHz)"); 

     // re-set in the surface changed method as well 
     recorder.setFrameRate(frameRate); 
     Log.v(LOG_TAG, "recorder.setFrameRate(frameRate)"); 

     // Create audio recording thread 
     audioRecordRunnable = new AudioRecordRunnable(); 
     audioThread = new Thread(audioRecordRunnable); 
    } 

    // Start the capture 
    public void startRecording() { 
     try { 
      recorder.start(); 
      startTime = System.currentTimeMillis(); 
      recording = true; 
      audioThread.start(); 
     } catch (FFmpegFrameRecorder.Exception e) { 
      e.printStackTrace(); 
     } 
    } 

    public void stopRecording() { 
     // This should stop the audio thread from running 
     runAudioThread = false; 

     if (recorder != null && recording) { 
      recording = false; 
      Log.v(LOG_TAG, 
        "Finishing recording, calling stop and release on recorder"); 
      try { 
       recorder.stop(); 
       recorder.release(); 
      } catch (FFmpegFrameRecorder.Exception e) { 
       e.printStackTrace(); 
      } 
      recorder = null; 
     } 
    } 

    @Override 
    public boolean onKeyDown(int keyCode, KeyEvent event) { 
     // Quit when back button is pushed 
     if (keyCode == KeyEvent.KEYCODE_BACK) { 
      if (recording) { 
       stopRecording(); 
      } 
      finish(); 
      return true; 
     } 
     return super.onKeyDown(keyCode, event); 
    } 

    @Override 
    public void onClick(View v) { 
     if (!recording) { 
      startRecording(); 
      Log.w(LOG_TAG, "Start Button Pushed"); 
      recordButton.setText("Stop"); 
     } else { 
      stopRecording(); 
      Log.w(LOG_TAG, "Stop Button Pushed"); 
      recordButton.setText("Start"); 
     } 
    } 

    // --------------------------------------------- 
    // audio thread, gets and encodes audio data 
    // --------------------------------------------- 
    class AudioRecordRunnable implements Runnable { 

     @Override 
     public void run() { 
      // Set the thread priority 
      android.os.Process 
        .setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO); 

      // Audio 
      int bufferSize; 
      short[] audioData; 
      int bufferReadResult; 

      bufferSize = AudioRecord.getMinBufferSize(sampleAudioRateInHz, 
        AudioFormat.CHANNEL_CONFIGURATION_MONO, 
        AudioFormat.ENCODING_PCM_16BIT); 
      audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, 
        sampleAudioRateInHz, 
        AudioFormat.CHANNEL_CONFIGURATION_MONO, 
        AudioFormat.ENCODING_PCM_16BIT, bufferSize); 

      audioData = new short[bufferSize]; 

      Log.d(LOG_TAG, "audioRecord.startRecording()"); 
      audioRecord.startRecording(); 

      // Audio Capture/Encoding Loop 
      while (runAudioThread) { 
       // Read from audioRecord 
       bufferReadResult = audioRecord.read(audioData, 0, 
         audioData.length); 
       if (bufferReadResult > 0) { 
        // Log.v(LOG_TAG,"audioRecord bufferReadResult: " + 
        // bufferReadResult); 

        // Changes in this variable may not be picked up despite it 
        // being "volatile" 
        if (recording) { 
         try { 
          // Write to FFmpegFrameRecorder 
          recorder.record(ShortBuffer.wrap(audioData, 0, 
            bufferReadResult)); 
         } catch (FFmpegFrameRecorder.Exception e) { 
          Log.v(LOG_TAG, e.getMessage()); 
          e.printStackTrace(); 
         } 
        } 
       } 
      } 
      Log.v(LOG_TAG, "AudioThread Finished"); 

      /* Capture/Encoding finished, release recorder */ 
      if (audioRecord != null) { 
       audioRecord.stop(); 
       audioRecord.release(); 
       audioRecord = null; 
       Log.v(LOG_TAG, "audioRecord released"); 
      } 
     } 
    } 

    class CameraView extends SurfaceView implements SurfaceHolder.Callback, 
      PreviewCallback { 

     private boolean previewRunning = false; 

     private SurfaceHolder holder; 
     private Camera camera; 

     private byte[] previewBuffer; 

     long videoTimestamp = 0; 

     Bitmap bitmap; 
     Canvas canvas; 

     public CameraView(Context _context) { 
      super(_context); 

      holder = this.getHolder(); 
      holder.addCallback(this); 
      holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS); 
     } 

     @Override 
     public void surfaceCreated(SurfaceHolder holder) { 
      camera = Camera.open(); 

      try { 
       camera.setPreviewDisplay(holder); 
       camera.setPreviewCallback(this); 

       Camera.Parameters currentParams = camera.getParameters(); 
       Log.v(LOG_TAG, 
         "Preview Framerate: " 
           + currentParams.getPreviewFrameRate()); 
       Log.v(LOG_TAG, 
         "Preview imageWidth: " 
           + currentParams.getPreviewSize().width 
           + " imageHeight: " 
           + currentParams.getPreviewSize().height); 

       // Use these values 
       imageWidth = currentParams.getPreviewSize().width; 
       imageHeight = currentParams.getPreviewSize().height; 
       frameRate = currentParams.getPreviewFrameRate(); 

       bitmap = Bitmap.createBitmap(imageWidth, imageHeight, 
         Bitmap.Config.ALPHA_8); 

       /* 
       * Log.v(LOG_TAG,"Creating previewBuffer size: " + imageWidth * 
       * imageHeight * 
       * ImageFormat.getBitsPerPixel(currentParams.getPreviewFormat 
       *())/8); previewBuffer = new byte[imageWidth * imageHeight * 
       * ImageFormat 
       * .getBitsPerPixel(currentParams.getPreviewFormat())/8]; 
       * camera.addCallbackBuffer(previewBuffer); 
       * camera.setPreviewCallbackWithBuffer(this); 
       */ 

       camera.startPreview(); 
       previewRunning = true; 
      } catch (IOException e) { 
       Log.v(LOG_TAG, e.getMessage()); 
       e.printStackTrace(); 
      } 
     } 

     public void surfaceChanged(SurfaceHolder holder, int format, int width, 
       int height) { 
      Log.v(LOG_TAG, "Surface Changed: width " + width + " height: " 
        + height); 

      // We would do this if we want to reset the camera parameters 
      /* 
      * if (!recording) { if (previewRunning){ camera.stopPreview(); } 
      * 
      * try { //Camera.Parameters cameraParameters = 
      * camera.getParameters(); //p.setPreviewSize(imageWidth, 
      * imageHeight); //p.setPreviewFrameRate(frameRate); 
      * //camera.setParameters(cameraParameters); 
      * 
      * camera.setPreviewDisplay(holder); camera.startPreview(); 
      * previewRunning = true; } catch (IOException e) { 
      * Log.e(LOG_TAG,e.getMessage()); e.printStackTrace(); } } 
      */ 

      // Get the current parameters 
      Camera.Parameters currentParams = camera.getParameters(); 
      Log.v(LOG_TAG, 
        "Preview Framerate: " + currentParams.getPreviewFrameRate()); 
      Log.v(LOG_TAG, 
        "Preview imageWidth: " 
          + currentParams.getPreviewSize().width 
          + " imageHeight: " 
          + currentParams.getPreviewSize().height); 

      // Use these values 
      imageWidth = currentParams.getPreviewSize().width; 
      imageHeight = currentParams.getPreviewSize().height; 
      frameRate = currentParams.getPreviewFrameRate(); 

      // Create the yuvIplimage if needed 
      yuvIplimage = IplImage.create(imageWidth, imageHeight, 
        IPL_DEPTH_8U, 1); 
      // yuvIplimage = IplImage.create(imageWidth, imageHeight, 
      // IPL_DEPTH_32S, 2); 
     } 

     @Override 
     public void surfaceDestroyed(SurfaceHolder holder) { 
      try { 
       camera.setPreviewCallback(null); 

       previewRunning = false; 
       camera.release(); 

      } catch (RuntimeException e) { 
       Log.v(LOG_TAG, e.getMessage()); 
       e.printStackTrace(); 
      } 
     } 

     @Override 
     public void onPreviewFrame(byte[] data, Camera camera) { 

      if (yuvIplimage != null && recording) { 
       videoTimestamp = 1000 * (System.currentTimeMillis() - startTime); 

       // Put the camera preview frame right into the yuvIplimage 
       // object 
       System.out.println("value of data=============" + data); 
       yuvIplimage.getByteBuffer().put(data); 

       // FAQ about IplImage: 
       // - For custom raw processing of data, getByteBuffer() returns 
       // an NIO direct 
       // buffer wrapped around the memory pointed by imageData, and 
       // under Android we can 
       // also use that Buffer with Bitmap.copyPixelsFromBuffer() and 
       // copyPixelsToBuffer(). 
       // - To get a BufferedImage from an IplImage, we may call 
       // getBufferedImage(). 
       // - The createFrom() factory method can construct an IplImage 
       // from a BufferedImage. 
       // - There are also a few copy*() methods for 
       // BufferedImage<->IplImage data transfers. 

       // Let's try it.. 
       // This works but only on transparency 
       // Need to find the right Bitmap and IplImage matching types 

       /* 
       * bitmap.copyPixelsFromBuffer(yuvIplimage.getByteBuffer()); 
       * //bitmap.setPixel(10,10,Color.MAGENTA); 
       * 
       * canvas = new Canvas(bitmap); Paint paint = new Paint(); 
       * paint.setColor(Color.GREEN); float leftx = 20; float topy = 
       * 20; float rightx = 50; float bottomy = 100; RectF rectangle = 
       * new RectF(leftx,topy,rightx,bottomy); 
       * canvas.drawRect(rectangle, paint); 
       * 
       * bitmap.copyPixelsToBuffer(yuvIplimage.getByteBuffer()); 
       */ 
       // Log.v(LOG_TAG,"Writing Frame"); 

       try { 

        // Get the correct time 
        recorder.setTimestamp(videoTimestamp); 

        // Record the image into FFmpegFrameRecorder 
        recorder.record(yuvIplimage); 

       } catch (FFmpegFrameRecorder.Exception e) { 
        Log.v(LOG_TAG, e.getMessage()); 
        e.printStackTrace(); 
       } 
      } 
     } 
    } 
} 

매니페스트

<manifest xmlns:android="http://schemas.android.com/apk/res/android" 
    package="com.example.javacv.stream.test2" 
    android:versionCode="1" 
    android:versionName="1.0" > 

    <uses-sdk 
     android:minSdkVersion="8" 
     android:targetSdkVersion="15" /> 

    <uses-permission android:name="android.permission.INTERNET" /> 
    <uses-permission android:name="android.permission.CAMERA" /> 
    <uses-permission android:name="android.permission.WAKE_LOCK" /> 
    <uses-permission android:name="android.permission.RECORD_AUDIO" /> 
    <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" /> 

    <application 
     android:icon="@drawable/ic_launcher" 
     android:label="@string/app_name" 
     android:theme="@style/AppTheme" > 
     <activity 
      android:name="com.example.javacv.stream.test2.MainActivity" 
      android:label="@string/title_activity_main" > 
      <intent-filter> 
       <action android:name="android.intent.action.MAIN" /> 

       <category android:name="android.intent.category.LAUNCHER" /> 
      </intent-filter> 
     </activity> 
    </application> 

</manifest> 
+0

당신이 "빨리 온다"무엇을 의미합니까? 오디오가 깨지십니까? 아니면 단순히 동기화되지 않았습니까? 어쨌든, 당신의 장치는 단지 실시간으로 인코딩하기에는 충분히 빠르지 않을 것 같습니다 :'start()'전에'FrameRecorder.setVideoBitrate()'를 호출하여 비트율을 줄이려고 시도 했습니까? –

+0

@SamuelAudet 이미 모든 프로세스를 사용했지만 나에게 적합하지 않습니다. recorder.setAudioCodec (avcodec.AV_CODEC_ID_AAC); \t \t \t recorder.setVideoCodec (avcodec.AV_CODEC_ID_FLV1); \t \t \t recorder.setAudioChannels (2); \t \t \t recorder.setVideoBitrate (1200); \t \t \t recorder.setAudioBitrate (80); – Dilip

+1

메일 링리스트의 다른 사용자가 일부 기기에서만 동일한 문제를 겪고 있습니다. 그것은 특정 장치에서만 발생하는 것과 관련이있는 것 같습니다 ... –

답변

2

이것은 우리가뿐만 아니라 아이폰 OS 측과 싸웠다 알려진 문제가 .. 기본적으로 비디오의 패킷이 삭제 얻을되는 반면, 오디오는 계속 가고, 모두 지옥에갑니다. 저 대역폭의 일부 하드웨어는 훌륭하게 작동하지 않고 동기화되지 않습니다. 단단한 문제가 있다고 생각하지 않습니다. 오디오/비디오 및 재 동기화를 위해 자체 버퍼를 구축하여 해킹해야했습니다. 타임 스탬프, 프레임 크기 및 패킷 수를 사용합니다.

내가 그 코드를하지만 프로토콜을 알고 있다면, 다시 열심히해서는 안됩니다 (그것은 내 게시 아니다) 게시 할 수 없습니다 두려워 ..

+0

정말 고맙습니다.하지만 똑같은 것을 시도해 보았고 해결책을 얻지 못했습니다. 고마워. – Dilip