2014-10-30 2 views
1

저는 안드로이드 코딩에 초보자입니다. (자바는 아니지만) 저는 Google Glass 용 간단한 카메라 앱을 만들려고합니다. 몇 분마다 자동으로 사진을 찍어 입력을 처리하기를 원하지만 Google Glass는 기본 Camera 구현을 사용하는 경우 모든 사진에 대해 '수락 할 수 있도록 탭'해야합니다. 그래서 안드로이드 카메라 API를 사용하여 사진을 찍으려고합니다. 그래서이 "탭하여 받아들이 기"를 건너 뛸 수 있습니다.Google Glass 앱이 takePicture와 함께 실패했습니다.

그러나 미리보기가 표시되는 동안 PictureCallback은 호출되지 않으므로 결과를 주 Activity로 다시 보내려고하면 NullPointerException이 throw됩니다.

현재 코드는 웹상의 모든 잠재적 인 문제 해결 방법의 일종으로, 미안하다면 지저분하다.

내 카메라 활동 클래스 :이 같은 주요 활동에 전화

package com.example.cerveau.blah; 

import android.app.Activity; 
import android.content.Intent; 
import android.hardware.Camera; 
import android.hardware.Camera.PictureCallback; 
import android.net.Uri; 
import android.os.Bundle; 
import android.os.Environment; 
import android.util.Log; 
import android.widget.FrameLayout; 

import java.io.File; 
import java.io.FileNotFoundException; 
import java.io.FileOutputStream; 
import java.io.IOException; 
import java.text.SimpleDateFormat; 
import java.util.Date; 

public class CameraActivity extends Activity { 

    private Camera mCamera; 
    private CameraPreview mPreview; 
    private Intent resultIntent; 
    private PictureCallback mPicture; 
    public static final int MEDIA_TYPE_IMAGE = 1; 
    public static final int MEDIA_TYPE_VIDEO = 2; 
    private static final String TAG = "CameraActivity"; 

    @Override 
    public void onCreate(Bundle savedInstanceState) { 
     super.onCreate(savedInstanceState); 
     setContentView(R.layout.recognize_places); 

     // Create an instance of Camera 
     mCamera = getCameraInstance(); 

     // Make the callback 
     mPicture = new PictureCallback() { 

      private static final String TAG = "PictureCallback"; 

      @Override 
      public void onPictureTaken(byte[] data, Camera camera) { 

       File pictureFile = getOutputMediaFile(MEDIA_TYPE_IMAGE); 
       if (pictureFile == null){ 
        Log.d(TAG, "Error creating media file, check storage permissions: "); 
        return; 
       } 

       try { 
        FileOutputStream fos = new FileOutputStream(pictureFile); 
        fos.write(data); 
        fos.close(); 
       } catch (FileNotFoundException e) { 
        Log.d(TAG, "File not found: " + e.getMessage()); 
       } catch (IOException e) { 
        Log.d(TAG, "Error accessing file: " + e.getMessage()); 
       } 
       Log.d(TAG, "Callback made and picture taken!"); 
      } 
     }; 

     // Create our Preview view and set it as the content of our activity. 
     mPreview = new CameraPreview(this, mCamera); 
     FrameLayout preview = (FrameLayout) findViewById(R.id.camera_preview); 
     preview.addView(mPreview); 
     Log.d(TAG, "Preview made!"); 

     mCamera.startPreview(); 

     // have a delay so the camera can set up 
     try { 
      Thread.sleep(1000); 
     } catch (InterruptedException e) { 
      e.printStackTrace(); 
     } 

     mCamera.takePicture(null, null, mPicture); 
     setIntent(getOutputMediaFileUri(MEDIA_TYPE_IMAGE)); 
     releaseCamera(); 

    } 


    public void setIntent(Uri photoURI){ 
     resultIntent = new Intent(); 
     resultIntent.setData(photoURI); 
     setResult(Activity.RESULT_OK, resultIntent); 
     finish(); 
    } 

    public static Camera getCameraInstance(){ 
     Camera c = null; 
     try { 
      c = Camera.open(); // attempt to get a Camera instance 
     } 
     catch (Exception e){ 
      // Camera is not available (in use or does not exist) 
     } 

     // Parameters needed for Google Glass 
     c.setDisplayOrientation(0); 
     Camera.Parameters params = c.getParameters(); 
     params.setPreviewFpsRange(30000, 30000); 
     params.setJpegQuality(90); 
// hard-coding is bad, but I'm a bit lazy 
     params.setPictureSize(640, 480); 
     params.setPreviewSize(640, 480); 
     c.setParameters(params); 

     return c; // returns null if camera is unavailable 
    } 


    @Override 
    protected void onPause() { 
     super.onPause(); 
     releaseCamera();    // release the camera immediately on pause event 
    } 

    private void releaseCamera(){ 
     if (mCamera != null){ 
      mCamera.release();  // release the camera for other applications 
      mCamera = null; 
     } 
    } 

    /** Create a file Uri for saving an image or video */ 
    private static Uri getOutputMediaFileUri(int type){ 
     return Uri.fromFile(getOutputMediaFile(type)); 
    } 

    /** Create a File for saving an image or video */ 
    private static File getOutputMediaFile(int type){ 
     // To be safe, you should check that the SDCard is mounted 
     // using Environment.getExternalStorageState() before doing this. 

     File mediaStorageDir = new File(Environment.getExternalStoragePublicDirectory(
       Environment.DIRECTORY_PICTURES), "MyCameraApp"); 
     // This location works best if you want the created images to be shared 
     // between applications and persist after your app has been uninstalled. 

     // Create the storage directory if it does not exist 
     if (! mediaStorageDir.exists()){ 
      if (! mediaStorageDir.mkdirs()){ 
       Log.d("MyCameraApp", "failed to create directory"); 
       return null; 
      } 
     } 

     // Create a media file name 
     String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date()); 
     File mediaFile; 
     if (type == MEDIA_TYPE_IMAGE){ 
      mediaFile = new File(mediaStorageDir.getPath() + File.separator + 
        "IMG_"+ timeStamp + ".jpg"); 
     } else if(type == MEDIA_TYPE_VIDEO) { 
      mediaFile = new File(mediaStorageDir.getPath() + File.separator + 
        "VID_"+ timeStamp + ".mp4"); 
     } else { 
      return null; 
     } 

     return mediaFile; 
    } 
} 

:

<uses-permission android:name="android.permission.CAMERA" /> 
<uses-permission android:name="com.google.android.glass.permission.DEVELOPMENT" /> 
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/> 
<uses-feature android:name="android.hardware.camera" android:required="true"/> 
:

Intent intent = new Intent(this, CameraActivity.class); 
    startActivityForResult(intent, CAPTURE_IMAGE_ACTIVITY_REQUEST_CODE); 

나는 이미 내 AndroidManifest를 모든 반드시 권한이

다음은 오류 로그입니다.

10-30 18:00:58.599 11361-11361/com.example.cerveau.recognizeplaces D/OpenGLRenderer﹕ Enabling debug mode 0 
10-30 18:00:58.833 11361-11361/com.example.cerveau.recognizeplaces D/CameraActivity﹕ Preview made! 
10-30 18:01:08.654 11361-11361/com.example.cerveau.recognizeplaces I/Choreographer﹕ Skipped 601 frames! The application may be doing too much work on its main thread. 
10-30 18:01:08.677 11361-11361/com.example.cerveau.recognizeplaces I/RecogPlaces﹕ Got to onActivity 
10-30 18:01:08.677 11361-11361/com.example.cerveau.recognizeplaces I/RecogPlaces﹕ Request code: 100, Result code: -1, what it wants: -1 
10-30 18:01:08.677 11361-11361/com.example.cerveau.recognizeplaces I/RecogPlaces﹕ Got inside the IF 
10-30 18:01:08.685 11361-11361/com.example.cerveau.recognizeplaces D/AndroidRuntime﹕ Shutting down VM 
10-30 18:01:08.685 11361-11361/com.example.cerveau.recognizeplaces W/dalvikvm﹕ threadid=1: thread exiting with uncaught exception (group=0x41600bd8) 
10-30 18:01:08.685 11361-11361/com.example.cerveau.recognizeplaces E/AndroidRuntime﹕ FATAL EXCEPTION: main 
    Process: com.example.cerveau.recognizeplaces, PID: 11361 
    java.lang.RuntimeException: Failure delivering result ResultInfo{who=null, request=100, result=-1, data=Intent { dat=file:///storage/emulated/0/Pictures/MyCameraApp/IMG_20141030_180059.jpg }} to activity {com.example.cerveau.recognizeplaces/com.example.cerveau.recognizeplaces.LiveCardMenuActivity}: java.lang.NullPointerException 
      at android.app.ActivityThread.deliverResults(ActivityThread.java:3391) 
      at android.app.ActivityThread.handleSendResult(ActivityThread.java:3434) 
      at android.app.ActivityThread.access$1300(ActivityThread.java:138) 
      at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1284) 
      at android.os.Handler.dispatchMessage(Handler.java:102) 
      at android.os.Looper.loop(Looper.java:149) 
      at android.app.ActivityThread.main(ActivityThread.java:5045) 
      at java.lang.reflect.Method.invokeNative(Native Method) 
      at java.lang.reflect.Method.invoke(Method.java:515) 
      at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:786) 
      at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:602) 
      at dalvik.system.NativeStart.main(Native Method) 
    Caused by: java.lang.NullPointerException 
      at java.io.File.fixSlashes(File.java:185) 
      at java.io.File.<init>(File.java:134) 
      at com.example.cerveau.recognizeplaces.LiveCardMenuActivity.processPictureWhenReady(LiveCardMenuActivity.java:166) 
      at com.example.cerveau.recognizeplaces.LiveCardMenuActivity.onActivityResult(LiveCardMenuActivity.java:157) 
      at android.app.Activity.dispatchActivityResult(Activity.java:5430) 
      at android.app.ActivityThread.deliverResults(ActivityThread.java:3387) 
            at android.app.ActivityThread.handleSendResult(ActivityThread.java:3434) 
            at android.app.ActivityThread.access$1300(ActivityThread.java:138) 
            at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1284) 
            at android.os.Handler.dispatchMessage(Handler.java:102) 
            at android.os.Looper.loop(Looper.java:149) 
            at android.app.ActivityThread.main(ActivityThread.java:5045) 
            at java.lang.reflect.Method.invokeNative(Native Method) 
            at java.lang.reflect.Method.invoke(Method.java:515) 
            at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:786) 
            at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:602) 
            at dalvik.system.NativeStart.main(Native Method) 
10-30 18:01:09.130 11361-11361/com.example.cerveau.recognizeplaces I/Process﹕ Sending signal. PID: 11361 SIG: 9 

미리 감사드립니다.

답변

0

processPictureWhenReady() 메서드는 LiveCardMenuActivity 클래스에서 잘못되었습니다.

Thread.sleep(1000); 

이제까지, 결코, UI 스레드에서이 작업을 수행하지 마십시오 :

첫째, 이것은 아주 나쁜 일이다.
직접

mCamera.startPreview(); 
mCamera.takePicture(null, null, mPicture); 

그리고 onPictureTaken() 콜백의 끝에서 호출해야합니다 : 귀하의 제안에 대한

setIntent(pictureFile.getPath()); 
+0

감사합니다 ... 내가 그들을 시도하고 아무것도 작동하지 않습니다! Google Glass에서 작동하는 카메라 API 구현을 찾지 못하는 것 같습니다. 현재이 중 하나를 시도하고 있습니다. http://stackoverflow.com/questions/23073180/glass-slow-camera-fileobserver-notification-xe12 -using-action-image-capt 카메라 미리보기가 나타나지 않고 사진을 찍지 않습니다. MainActivity에서 뭔가 잘못하고있을 가능성이 있습니까? 나는 이것을 Intent라고 부르고 나서 StartActivityForResult를 시작한다. 또한이 SurfaceView를 사용 하 고 광산 그냥 내 애플에 frowny 얼굴 큰 구름으로 표시됩니다 ... – choupettes

+0

Glass 개발자 가이드를 보았습니까? https://developers.google.com/glass/develop/gdk/camera 저는 이것을 사용하여 유리 장치로 사진을 찍었습니다. –