2015-10-15 98 views
0

嗨,我已經這樣做了4天,但我無法解決它。我有一個類是相機預覽類,我在類中創建了一個獲取位圖函數,我想在另一個類中調用此函數來獲取位圖。Android Camera SurfaceView創建位圖

這是我調用獲取位圖(getBitmap)功能的程式碼:

public class Stage extends GLSurfaceView { 

    /** Logical (orthographic) width/height of the stage in GL units. */
    private float w, h; 
    private int screenWidth, screenHeight; 
    /** Screen-capture request flag; serviced once on the next rendered frame. */
    private boolean SC; 
    /** Bitmap written to disk when a capture is requested. */
    private Bitmap imgB; 
    /** When true, the capture is taken from the camera preview instead of the GL scene. */
    private boolean c; 
    MyRenderer mRenderer; 
    /** Camera preview; supplies the bitmap when {@code c} is set. */
    CameraSurfaceView csv; 

    public Stage(Context context, AttributeSet attrs) { 
        super(context, attrs); 
        setEGLConfigChooser(8, 8, 8, 8, 0, 0); 
        getHolder().setFormat(PixelFormat.TRANSPARENT); 
        setZOrderOnTop(true); 
        mRenderer = new MyRenderer(); 
        setRenderer(mRenderer); 
        setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY); 

        // NOTE(review): constructing the camera view is not enough to start the
        // preview -- it must also be attached to the window/view hierarchy,
        // otherwise its surface is never created and no frames arrive.
        csv = new CameraSurfaceView(context); 
    } 

    public class MyRenderer implements GLSurfaceView.Renderer { 

        /** Requests a screen capture on the next frame and triggers a render. */
        public void setSC(boolean yn) { 
            SC = yn; 
            requestRender(); 
        } 

        /** Selects the capture source: true = camera preview. */
        public void setC(boolean y) { 
            c = y; 
        } 

        public final void onDrawFrame(GL10 gl) { 
            gl.glClear(GLES10.GL_COLOR_BUFFER_BIT); 

            if (SC) { 
                if (c) { 
                    // Capture the latest camera preview frame as a bitmap.
                    imgB = csv.getCameraBitmap(); 
                } 

                // BUGFIX: getCameraBitmap() returns null until the camera has
                // delivered at least one frame -- guard against the NPE that
                // imgB.compress(...) would otherwise throw.
                if (imgB != null) { 
                    saveBitmap(imgB); 
                } 
                SC = false; 
            } 
        } 

        /** Writes {@code bitmap} as a timestamped PNG under /OpenGL on external storage. */
        private void saveBitmap(Bitmap bitmap) { 
            String dirPath = Environment.getExternalStorageDirectory().getAbsolutePath() + "/OpenGL"; 
            File dir = new File(dirPath); 
            if (!dir.exists() && !dir.mkdirs()) { 
                return; // output directory could not be created
            } 
            String stamp = new SimpleDateFormat("yyyyMMddHHmmss", java.util.Locale.getDefault()).format(new Date()); 
            File file = new File(dirPath, stamp + ".png"); 
            // BUGFIX: try-with-resources closes the stream even when compress() throws;
            // the original leaked the FileOutputStream on the exception path.
            try (FileOutputStream fOut = new FileOutputStream(file)) { 
                bitmap.compress(Bitmap.CompressFormat.PNG, 85, fOut); // quality is ignored for lossless PNG
                fOut.flush(); 
            } catch (Exception e) { 
                e.printStackTrace(); 
            } 
        } 

        public final void onSurfaceChanged(GL10 gl, int width, int height) { 
            gl.glClearColor(0, 0, 0, 0); 

            // Keep the shorter dimension at 600 logical units and scale the
            // other to preserve the surface's aspect ratio.
            if (width > height) { 
                h = 600; 
                w = width * h / height; 
            } else { 
                w = 600; 
                h = height * w / width; 
            } 
            screenWidth = width; 
            screenHeight = height; 

            gl.glViewport(0, 0, screenWidth, screenHeight); 
            gl.glMatrixMode(GL10.GL_PROJECTION); 
            gl.glLoadIdentity(); 
            // Top-left origin, y grows downward (2D screen coordinates).
            gl.glOrthof(0, w, h, 0, -1, 1); 
            gl.glMatrixMode(GL10.GL_MODELVIEW); 
            gl.glLoadIdentity(); 
        } 

        public final void onSurfaceCreated(GL10 gl, EGLConfig config) { 
            // Set up alpha blending (premultiplied-alpha blend function).
            gl.glEnable(GL10.GL_ALPHA_TEST); 
            gl.glEnable(GL10.GL_BLEND); 
            gl.glBlendFunc(GL10.GL_ONE, GL10.GL_ONE_MINUS_SRC_ALPHA); 

            // Pure 2D rendering: depth testing is unnecessary.
            gl.glDisable(GL10.GL_DEPTH_TEST); 

            // Enable vertex arrays (used to draw primitives).
            gl.glEnableClientState(GL10.GL_VERTEX_ARRAY); 

            // Enable texture coordinate arrays.
            gl.glEnableClientState(GL10.GL_TEXTURE_COORD_ARRAY); 

            // NOTE(review): `tex` is declared elsewhere in the project; it
            // presumably (re)loads textures when the surface is (re)created -- confirm.
            tex.load(getContext()); 
        } 

    } 

} 

這是我的相機類:

public class CameraSurfaceView extends SurfaceView implements SurfaceHolder.Callback { 
    private static final String TAG = "CameraSurfaceView"; 

    private SurfaceHolder mSurfaceHolder; 
    private Camera mCamera = null; 
    private Bitmap mBitmap; 
    private Context mContext; 
    private Camera.Parameters mParameters; 
    // Latest raw preview frame; written on the camera callback thread and read
    // when a bitmap is requested. volatile for cross-thread visibility.
    private volatile byte[] byteArray; 
    private Camera.Size mPreviewSize; 
    // Kept for compatibility with existing readers; refreshed by getCameraBitmap().
    Bitmap cameraBitmap; 

    public CameraSurfaceView (Context context) { 
        this(context, null); 
    } 

    public CameraSurfaceView (Context context, AttributeSet attrs) { 
        this(context, attrs, 0); 
    } 

    public CameraSurfaceView (Context context, AttributeSet attrs, int defStyle) { 
        super(context, attrs, defStyle); 
        mContext = context; 

        try { 
            mSurfaceHolder = getHolder(); 
            mSurfaceHolder.addCallback(this); 
            // Deprecated, but required for the camera preview on pre-3.0 devices.
            mSurfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS); 
        } catch (Exception e) { 
            e.printStackTrace(); 
        } 
    } 

    @Override 
    public void surfaceCreated(final SurfaceHolder surfaceHolder) { 
        if (mCamera == null) { 
            try { 
                mCamera = Camera.open(); 
            } catch (RuntimeException ignored) { 
                // Camera is in use or unavailable; we bail out below.
            } 
        } 

        try { 
            if (mCamera != null) { 
                mCamera.setPreviewDisplay(mSurfaceHolder); 
            } 
        } catch (Exception e) { 
            if (mCamera != null) { 
                mCamera.release(); 
            } 
            mCamera = null; 
        } 

        if (mCamera == null) { 
            return; 
        } 

        mCamera.setPreviewCallback(new Camera.PreviewCallback() { 
            @Override 
            public void onPreviewFrame(byte[] bytes, Camera camera) { 
                if (mParameters == null) { 
                    return; 
                } 
                // BUGFIX: only store the raw frame here. The original decoded
                // YUV -> JPEG -> Bitmap on EVERY preview frame, which is far
                // too expensive; decoding now happens lazily in getCameraBitmap().
                byteArray = bytes; 
            } 
        }); 

        setWillNotDraw(false); 
    } 

    @Override 
    public void surfaceChanged(SurfaceHolder surfaceHolder, int format, int width, int height) { 
        // BUGFIX: the camera may have failed to open in surfaceCreated();
        // without this guard mCamera.getParameters() throws an NPE.
        if (mCamera == null) { 
            return; 
        } 
        try { 
            mParameters = mCamera.getParameters(); 

            // Pick the largest supported preview size (by pixel count).
            List<Camera.Size> cameraSizes = mParameters.getSupportedPreviewSizes(); 
            mPreviewSize = cameraSizes.get(0); 
            for (Camera.Size s : cameraSizes) { 
                if ((s.width * s.height) > (mPreviewSize.width * mPreviewSize.height)) { 
                    mPreviewSize = s; 
                } 
            } 

            mParameters.setPreviewSize(mPreviewSize.width, mPreviewSize.height); 
            mCamera.setParameters(mParameters); 
            mCamera.startPreview(); 

        } catch (Exception e) { 
            if (mCamera != null) { 
                mCamera.release(); 
                mCamera = null; 
            } 
        } 
    } 

    @Override 
    public void surfaceDestroyed(SurfaceHolder surfaceHolder) { 
        // Release the camera so other applications can use it.
        if (mCamera != null) { 
            mCamera.setPreviewCallback(null); 
            mCamera.stopPreview(); 
            mCamera.release(); 
            mCamera = null; 
        } 
    } 

    /**
     * Decodes the most recent preview frame into a Bitmap.
     *
     * @return the decoded bitmap, or null when no frame has arrived yet or the
     *         preview is not configured.
     */
    public Bitmap getBitmap() { 
        // Snapshot the frame: the preview callback may replace it concurrently.
        byte[] frame = byteArray; 
        // BUGFIX: also guard against a null frame -- the original would pass
        // null into YuvImage and throw IllegalArgumentException.
        if (mParameters == null || mPreviewSize == null || frame == null) { 
            return null; 
        } 
        try (ByteArrayOutputStream out = new ByteArrayOutputStream()) { 
            int previewFormat = mParameters.getPreviewFormat(); 
            YuvImage yuvImage = new YuvImage(frame, previewFormat, mPreviewSize.width, mPreviewSize.height, null); 
            Rect rect = new Rect(0, 0, mPreviewSize.width, mPreviewSize.height); 
            yuvImage.compressToJpeg(rect, 75, out); 

            BitmapFactory.Options options = new BitmapFactory.Options(); 
            options.inPurgeable = true;       // deprecated; harmless hint on old devices
            options.inInputShareable = true; 
            mBitmap = BitmapFactory.decodeByteArray(out.toByteArray(), 0, out.size(), options); 
        } catch (IOException ioe) { 
            ioe.printStackTrace(); 
        } 

        return mBitmap; 
    } 

    /** Returns a bitmap of the latest camera frame, or null if none is available yet. */
    public Bitmap getCameraBitmap() { 
        cameraBitmap = getBitmap(); // keep the legacy field up to date
        return cameraBitmap; 
    } 
} 

任何指導,可以理解〜

回答

0

要開始工作相機,您必須顯示CameraSurfaceView - 創建它「在空中」是不夠的。請參閱最近的相關討論:Take a photo using a service on OnePlus One - using WindowManager hack

你不需要 takePicture() 的那部分,但等待預覽幀被複製到 byteArray 的過程同樣是異步的。

底線是,您無法同步接收位圖。您可以請求位圖,並以回調形式交付,例如

// Create the preview view AND attach it to the window -- the camera only
// starts delivering frames once the view's surface exists.
CameraSurfaceView csv = new CameraSurfaceView(getContext()); 
// BUGFIX: Activity has addContentView(View, LayoutParams); there is no addContentsView().
((Activity) getContext()).addContentView(csv, 
        new ViewGroup.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, 
                                   ViewGroup.LayoutParams.MATCH_PARENT)); 
csv.requestBitmap(imageView); 

,並在某處CameraSurfaceView.java

// Called for each preview frame; delivers the decoded bitmap to the caller's view.
public void onPreviewFrame(byte[] bytes, Camera camera) { 
    byteArray = bytes; 
    Bitmap imgB = getBitmap(); 
    // BUGFIX: ImageView has no setBitmap(); the correct method is setImageBitmap().
    // NOTE(review): this callback runs on the thread the camera was opened on;
    // if that is not the UI thread, post this call to the main thread instead.
    imageView.setImageBitmap(imgB); 
} 
+0

我要如何把位圖保存到一個變量中,讓我可以在其他類中使用它? –

+0

那就這樣使用它——你可以按需要使用它,但你無法馬上(同步地)得到它。 –

+0

要怎麼做到這一點?我按照你上面的方式調用 getBitmap 函數,但仍然得到空(null)的位圖 –