
Google Glass app fails with takePicture

I'm new to Android programming (but not entirely new to Java), and I'm building a simple camera app for Google Glass. I want it to take a picture automatically every few minutes and process the input, but with the native camera implementation, Google Glass forces you to "tap to accept" every photo. So I'm trying to take the picture with the Android Camera API instead, which should let me skip the "tap to accept" step.

However, while the preview is displayed, the PictureCallback is never invoked, so a NullPointerException is thrown when the result is sent back to the main Activity.

My current code is a jumble of various potential workarounds found around the web, so sorry if it's messy!

My camera Activity class:

package com.example.cerveau.blah; 

import android.app.Activity; 
import android.content.Intent; 
import android.hardware.Camera; 
import android.hardware.Camera.PictureCallback; 
import android.net.Uri; 
import android.os.Bundle; 
import android.os.Environment; 
import android.util.Log; 
import android.widget.FrameLayout; 

import java.io.File; 
import java.io.FileNotFoundException; 
import java.io.FileOutputStream; 
import java.io.IOException; 
import java.text.SimpleDateFormat; 
import java.util.Date; 

public class CameraActivity extends Activity { 

    private Camera mCamera; 
    private CameraPreview mPreview; 
    private Intent resultIntent; 
    private PictureCallback mPicture; 
    public static final int MEDIA_TYPE_IMAGE = 1; 
    public static final int MEDIA_TYPE_VIDEO = 2; 
    private static final String TAG = "CameraActivity"; 

    @Override 
    public void onCreate(Bundle savedInstanceState) { 
     super.onCreate(savedInstanceState); 
     setContentView(R.layout.recognize_places); 

     // Create an instance of Camera 
     mCamera = getCameraInstance(); 

     // Make the callback 
     mPicture = new PictureCallback() { 

      private static final String TAG = "PictureCallback"; 

      @Override 
      public void onPictureTaken(byte[] data, Camera camera) { 

       File pictureFile = getOutputMediaFile(MEDIA_TYPE_IMAGE); 
       if (pictureFile == null){ 
        Log.d(TAG, "Error creating media file, check storage permissions: "); 
        return; 
       } 

       try { 
        FileOutputStream fos = new FileOutputStream(pictureFile); 
        fos.write(data); 
        fos.close(); 
       } catch (FileNotFoundException e) { 
        Log.d(TAG, "File not found: " + e.getMessage()); 
       } catch (IOException e) { 
        Log.d(TAG, "Error accessing file: " + e.getMessage()); 
       } 
       Log.d(TAG, "Callback made and picture taken!"); 
      } 
     }; 

     // Create our Preview view and set it as the content of our activity. 
     mPreview = new CameraPreview(this, mCamera); 
     FrameLayout preview = (FrameLayout) findViewById(R.id.camera_preview); 
     preview.addView(mPreview); 
     Log.d(TAG, "Preview made!"); 

     mCamera.startPreview(); 

     // have a delay so the camera can set up 
     try { 
      Thread.sleep(1000); 
     } catch (InterruptedException e) { 
      e.printStackTrace(); 
     } 

     mCamera.takePicture(null, null, mPicture); 
     setIntent(getOutputMediaFileUri(MEDIA_TYPE_IMAGE)); 
     releaseCamera(); 

    } 


    public void setIntent(Uri photoURI){ 
     resultIntent = new Intent(); 
     resultIntent.setData(photoURI); 
     setResult(Activity.RESULT_OK, resultIntent); 
     finish(); 
    } 

    public static Camera getCameraInstance(){ 
     Camera c = null; 
     try { 
      c = Camera.open(); // attempt to get a Camera instance 
     } 
     catch (Exception e){ 
      // Camera is not available (in use or does not exist) 
     } 

     // Parameters needed for Google Glass 
     if (c != null) { 
      c.setDisplayOrientation(0); 
      Camera.Parameters params = c.getParameters(); 
      params.setPreviewFpsRange(30000, 30000); 
      params.setJpegQuality(90); 
      // hard-coding is bad, but I'm a bit lazy 
      params.setPictureSize(640, 480); 
      params.setPreviewSize(640, 480); 
      c.setParameters(params); 
     } 

     return c; // returns null if camera is unavailable 
    } 


    @Override 
    protected void onPause() { 
     super.onPause(); 
     releaseCamera();    // release the camera immediately on pause event 
    } 

    private void releaseCamera(){ 
     if (mCamera != null){ 
      mCamera.release();  // release the camera for other applications 
      mCamera = null; 
     } 
    } 

    /** Create a file Uri for saving an image or video */ 
    private static Uri getOutputMediaFileUri(int type){ 
     return Uri.fromFile(getOutputMediaFile(type)); 
    } 

    /** Create a File for saving an image or video */ 
    private static File getOutputMediaFile(int type){ 
     // To be safe, you should check that the SDCard is mounted 
     // using Environment.getExternalStorageState() before doing this. 

     File mediaStorageDir = new File(Environment.getExternalStoragePublicDirectory(
       Environment.DIRECTORY_PICTURES), "MyCameraApp"); 
     // This location works best if you want the created images to be shared 
     // between applications and persist after your app has been uninstalled. 

     // Create the storage directory if it does not exist 
     if (! mediaStorageDir.exists()){ 
      if (! mediaStorageDir.mkdirs()){ 
       Log.d("MyCameraApp", "failed to create directory"); 
       return null; 
      } 
     } 

     // Create a media file name 
     String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date()); 
     File mediaFile; 
     if (type == MEDIA_TYPE_IMAGE){ 
      mediaFile = new File(mediaStorageDir.getPath() + File.separator + 
        "IMG_"+ timeStamp + ".jpg"); 
     } else if(type == MEDIA_TYPE_VIDEO) { 
      mediaFile = new File(mediaStorageDir.getPath() + File.separator + 
        "VID_"+ timeStamp + ".mp4"); 
     } else { 
      return null; 
     } 

     return mediaFile; 
    } 
} 

I launch it from my main activity like this:

Intent intent = new Intent(this, CameraActivity.class); 
    startActivityForResult(intent, CAPTURE_IMAGE_ACTIVITY_REQUEST_CODE); 
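
For context, the crash in the log further down happens when this result is consumed in LiveCardMenuActivity.onActivityResult(). That handler isn't shown here, but a defensive version of it might look like the sketch below; the String-path parameter of processPictureWhenReady() is an assumption based on the stack trace, not code from this post.

@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    if (requestCode == CAPTURE_IMAGE_ACTIVITY_REQUEST_CODE && resultCode == RESULT_OK) {
        // CameraActivity returns the photo as the Intent's data URI, so read
        // getData() and guard against null before building a java.io.File;
        // new File(null) is exactly the NullPointerException in the log.
        Uri photoUri = (data != null) ? data.getData() : null;
        if (photoUri != null && photoUri.getPath() != null) {
            processPictureWhenReady(photoUri.getPath());
        } else {
            Log.w("RecogPlaces", "CameraActivity returned no picture URI");
        }
    }
    super.onActivityResult(requestCode, resultCode, data);
}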

I have these permissions in my AndroidManifest:

<uses-permission android:name="android.permission.CAMERA" /> 
<uses-permission android:name="com.google.android.glass.permission.DEVELOPMENT" /> 
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/> 
<uses-feature android:name="android.hardware.camera" android:required="true"/> 

Here is the error log:

10-30 18:00:58.599 11361-11361/com.example.cerveau.recognizeplaces D/OpenGLRenderer﹕ Enabling debug mode 0 
10-30 18:00:58.833 11361-11361/com.example.cerveau.recognizeplaces D/CameraActivity﹕ Preview made! 
10-30 18:01:08.654 11361-11361/com.example.cerveau.recognizeplaces I/Choreographer﹕ Skipped 601 frames! The application may be doing too much work on its main thread. 
10-30 18:01:08.677 11361-11361/com.example.cerveau.recognizeplaces I/RecogPlaces﹕ Got to onActivity 
10-30 18:01:08.677 11361-11361/com.example.cerveau.recognizeplaces I/RecogPlaces﹕ Request code: 100, Result code: -1, what it wants: -1 
10-30 18:01:08.677 11361-11361/com.example.cerveau.recognizeplaces I/RecogPlaces﹕ Got inside the IF 
10-30 18:01:08.685 11361-11361/com.example.cerveau.recognizeplaces D/AndroidRuntime﹕ Shutting down VM 
10-30 18:01:08.685 11361-11361/com.example.cerveau.recognizeplaces W/dalvikvm﹕ threadid=1: thread exiting with uncaught exception (group=0x41600bd8) 
10-30 18:01:08.685 11361-11361/com.example.cerveau.recognizeplaces E/AndroidRuntime﹕ FATAL EXCEPTION: main 
    Process: com.example.cerveau.recognizeplaces, PID: 11361 
    java.lang.RuntimeException: Failure delivering result ResultInfo{who=null, request=100, result=-1, data=Intent { dat=file:///storage/emulated/0/Pictures/MyCameraApp/IMG_20141030_180059.jpg }} to activity {com.example.cerveau.recognizeplaces/com.example.cerveau.recognizeplaces.LiveCardMenuActivity}: java.lang.NullPointerException 
      at android.app.ActivityThread.deliverResults(ActivityThread.java:3391) 
      at android.app.ActivityThread.handleSendResult(ActivityThread.java:3434) 
      at android.app.ActivityThread.access$1300(ActivityThread.java:138) 
      at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1284) 
      at android.os.Handler.dispatchMessage(Handler.java:102) 
      at android.os.Looper.loop(Looper.java:149) 
      at android.app.ActivityThread.main(ActivityThread.java:5045) 
      at java.lang.reflect.Method.invokeNative(Native Method) 
      at java.lang.reflect.Method.invoke(Method.java:515) 
      at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:786) 
      at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:602) 
      at dalvik.system.NativeStart.main(Native Method) 
    Caused by: java.lang.NullPointerException 
      at java.io.File.fixSlashes(File.java:185) 
      at java.io.File.<init>(File.java:134) 
      at com.example.cerveau.recognizeplaces.LiveCardMenuActivity.processPictureWhenReady(LiveCardMenuActivity.java:166) 
      at com.example.cerveau.recognizeplaces.LiveCardMenuActivity.onActivityResult(LiveCardMenuActivity.java:157) 
      at android.app.Activity.dispatchActivityResult(Activity.java:5430) 
      at android.app.ActivityThread.deliverResults(ActivityThread.java:3387) 
            at android.app.ActivityThread.handleSendResult(ActivityThread.java:3434) 
            at android.app.ActivityThread.access$1300(ActivityThread.java:138) 
            at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1284) 
            at android.os.Handler.dispatchMessage(Handler.java:102) 
            at android.os.Looper.loop(Looper.java:149) 
            at android.app.ActivityThread.main(ActivityThread.java:5045) 
            at java.lang.reflect.Method.invokeNative(Native Method) 
            at java.lang.reflect.Method.invoke(Method.java:515) 
            at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:786) 
            at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:602) 
            at dalvik.system.NativeStart.main(Native Method) 
10-30 18:01:09.130 11361-11361/com.example.cerveau.recognizeplaces I/Process﹕ Sending signal. PID: 11361 SIG: 9 

Thanks in advance!

Answers


You're doing something wrong in the processPictureWhenReady() method of your LiveCardMenuActivity class.

First of all, this is a really bad thing to do:

Thread.sleep(1000); 

Never, ever do this on the UI thread.
Instead, call the following directly:

mCamera.startPreview(); 
mCamera.takePicture(null, null, mPicture); 

and then, at the end of the onPictureTaken() callback, call:

setIntent(Uri.fromFile(pictureFile)); 
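
Concretely, one way to restructure the posted CameraActivity along these lines is to trigger the capture from the preview surface's callback and finish the activity only from inside onPictureTaken(). The following is just a sketch of that idea, reusing the fields and helpers from the question; it assumes CameraPreview is a SurfaceView whose own surfaceCreated() already attaches mCamera, it needs android.view.SurfaceHolder imported, and it has not been tested on Glass:

// Inside onCreate(), after adding mPreview to the FrameLayout:
mPreview.getHolder().addCallback(new SurfaceHolder.Callback() {
    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        mCamera.startPreview();
        // No Thread.sleep(): the surface exists, so capture right away.
        mCamera.takePicture(null, null, new Camera.PictureCallback() {
            @Override
            public void onPictureTaken(byte[] data, Camera camera) {
                File pictureFile = getOutputMediaFile(MEDIA_TYPE_IMAGE);
                // ... write `data` to pictureFile exactly as in the question ...
                releaseCamera();
                // Hand the result back only after the JPEG has been written.
                setIntent(Uri.fromFile(pictureFile));
            }
        });
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) { }
});

The underlying point is that takePicture() is asynchronous, so releasing the camera or finishing the activity immediately after calling it means the callback never gets a chance to run.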

Thanks for the suggestions. Unfortunately, they didn't really work! I can't seem to find a Camera API implementation that works for me on Google Glass... I'm currently trying this one: http://stackoverflow.com/questions/23073180/glass-slow-camera-fileobserver-notification-xe12 -using-action-image-capt However, the camera preview doesn't appear and it doesn't take a picture. Maybe I'm doing something wrong at the end of my MainActivity? I call it with an intent and then startActivityForResult. Also, that one uses a SurfaceView, and my app shows up as a giant cloud with a frowny face... – choupettes 2014-11-27 06:18:16


Have you looked at the Glass developer guide? https://developers.google.com/glass/develop/gdk/camera I used it to take pictures with the Glass device. – 2015-01-11 00:56:55
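
For reference, the pattern in that guide hands the capture off to the built-in Glass camera and reads the saved file's path back in onActivityResult(). Below is a minimal sketch of that approach, assuming the GDK's CameraManager extra (com.google.android.glass.media.CameraManager) and a processPictureWhenReady(String) helper along the lines of the one in the guide:

// Needs android.provider.MediaStore and com.google.android.glass.media.CameraManager.
private static final int TAKE_PICTURE_REQUEST = 1;

private void takePicture() {
    // Delegate the capture to the built-in Glass camera.
    Intent intent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
    startActivityForResult(intent, TAKE_PICTURE_REQUEST);
}

@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    if (requestCode == TAKE_PICTURE_REQUEST && resultCode == RESULT_OK) {
        // The full-size JPEG may not be fully written when this fires; the
        // guide waits for it (for example with a FileObserver) before use.
        String picturePath = data.getStringExtra(CameraManager.EXTRA_PICTURE_FILE_PATH);
        processPictureWhenReady(picturePath);
    }
    super.onActivityResult(requestCode, resultCode, data);
}

Note that this route goes through the standard Glass capture flow, so it is simpler but does not avoid the built-in capture UI that the question wanted to skip.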