
Front camera face detection (Android Studio)

I've heard there is already a camera class available in Android Studio.

What I'm trying to do is make an app that opens the camera when you press it, uses only the front camera, and then, when it detects your face, says something like "You look great today!" by itself. I want to record my own voice and use that recording as the message.

So if anyone could guide me on how to do something like this, that would be amazing! I have researched everywhere!

I have already downloaded a ready-made face detection demo. It works, but what I really need now is to add my voice.

  1. Open the app
  2. Wait until a face is detected
  3. Say: "You look good today"

That is really all I want! Can you give me some advice, or at least some steps on how to do this?

I'm a beginner, so please keep the explanation simple.

Answer


To keep things simple (rather than using OpenCV or something more complicated), just use Android's native face detection library; as soon as the app starts it can track your face. You can also add an if statement that checks whether a face has been recognized and whether the voice message has already been played:

if(recognized_faces != 0 && message_already_played == false){ 
    //play the message 
} 
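
For the voice itself, the simplest route is to record your message once, export it as an audio file (an mp3 or ogg, for example), place it in res/raw, and play it with MediaPlayer. This is only a minimal sketch, assuming the file is named you_look_good.mp3 so the resource id becomes R.raw.you_look_good (the same resource name the full example below uses):

// res/raw/you_look_good.mp3  ->  R.raw.you_look_good 
MediaPlayer player = MediaPlayer.create(this, R.raw.you_look_good); 

if(recognized_faces != 0 && message_already_played == false){ 
    player.start();                  // plays your recorded "You look great today!" 
    message_already_played = true;   // so it is only played once 
} 

// when you are done with it (e.g. in onDestroy) 
player.release(); 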

Here is a piece of code (an Activity) that grabs all the preview frames from the front camera and analyzes them. The original code drew the frames on a canvas and showed them one after another (like a video stream) in "miaImmagine2"; if you don't want that, just don't display miaImmagine2 and show only the camera layer. I have removed many parts of the code that were not needed here, so there may still be plenty left for you to delete or adjust, but it should give you a good starting point. Here is the code:

package mawashi.alex.driveawake; 

import android.annotation.SuppressLint; 
import android.app.Activity; 
import android.content.Context; 
import android.content.Intent; 
import android.graphics.Bitmap; 
import android.graphics.BitmapFactory; 
import android.graphics.Canvas; 
import android.graphics.Color; 
import android.graphics.ImageFormat; 
import android.graphics.Matrix; 
import android.graphics.Paint; 
import android.graphics.PointF; 
import android.graphics.Rect; 
import android.graphics.YuvImage; 
import android.hardware.Camera; 
import android.hardware.Camera.ErrorCallback; 
import android.hardware.Camera.PreviewCallback; 
import android.hardware.Camera.Size; 
import android.media.AudioManager; 
import android.media.FaceDetector; 
import android.media.MediaMetadataRetriever; 
import android.media.MediaPlayer; 
import android.net.Uri; 
import android.os.Bundle; 
import android.util.Log; 
import android.view.SurfaceHolder; 
import android.view.SurfaceHolder.Callback; 
import android.view.SurfaceView; 
import android.view.View; 
import android.view.Window; 
import android.widget.FrameLayout; 
import android.widget.ImageView; 
import android.widget.TextView; 
import android.widget.Toast; 

import java.io.ByteArrayOutputStream; 
import java.io.File; 
import java.io.FileInputStream; 
import java.io.IOException; 
import java.lang.reflect.Method; 
import java.util.List; 

@SuppressLint("NewApi") 
public class AwakeActivity extends Activity { 
    SurfaceView mVideoCaptureView; 

    Camera mCamera; 

    public Bitmap mFaceBitmap; 
    public TextView Testo; 
    public Bitmap mFace2Bitmap; 
    public ImageView miaImmagine2; 
    private MediaPlayer player; 
    public SurfaceView mSurfaceView; 
    public SurfaceHolder mSurfaceHolder; 
    private static final int MAX_FACES = 1; 
    int mFaceWidth; 
    int mFaceHeight; 
    int cameraType = 1; // front 
    int frame_sec = 1000000; 
    byte[] callbackBuffer; 
    PreviewCallback cb; 
    public FrameLayout preview; 
    Bitmap bmp; 
    Bitmap b; 
    MediaMetadataRetriever mediaMetadataRetriever=null; 
    Bitmap bmFrame = null; 
    FaceDetector.Face[] faces; 
    Activity activity; 
    Context mContext; 
    int index_sleep = 0; 
    int consec = 0; 
    boolean sleep = false; 
    public float scala = 1; 
    public int offset = 0; 
    int m = 1; 
    int Colore = Color.GRAY; 
    private MediaPlayer player2; 
    boolean message_started = false;   // becomes true once the voice message has been played 

    @Override 
    public void onCreate(Bundle savedInstanceState) { 
     super.onCreate(savedInstanceState); 
     this.requestWindowFeature(Window.FEATURE_NO_TITLE); 
     setContentView(R.layout.detection); 
     mVideoCaptureView = (SurfaceView) findViewById(R.id.Surface); 
     miaImmagine2 = (ImageView) findViewById(R.id.imageView2); 
     Testo = (TextView) findViewById(R.id.textView1); 
     SurfaceHolder videoCaptureViewHolder = mVideoCaptureView.getHolder(); 
     videoCaptureViewHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS); 
     setVolumeControlStream(AudioManager.STREAM_MUSIC); 
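     // load your recorded message from res/raw (resource: you_look_good) 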
     player = MediaPlayer.create(this, R.raw.you_look_good); 
     videoCaptureViewHolder.addCallback(new Callback() { 
      public void surfaceDestroyed(SurfaceHolder holder) { 
      } 

      public void surfaceCreated(SurfaceHolder holder) { 
       startVideo(); 
      } 

      public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {} 
     }); 
    } 

    //////////////////////////////////////////////////////////////////////////////////////// 
    //************BUTTONS*************************************************************** 



    //ROTATE THE IMAGE BY 90 DEGREES 

    protected void setDisplayOrientation(Camera mCamera, int angle){ 
     Method downPolymorphic; 
     try{ 
      downPolymorphic = mCamera.getClass().getMethod("setDisplayOrientation", new Class[] { int.class }); 
      if (downPolymorphic != null) 
       downPolymorphic.invoke(mCamera, new Object[] { angle }); 
     }catch (Exception e1){ } 
    } 

    ///////////////////////////////////////////////////////////////////////////////////////////// 
    //****************************************************************************************** 

    @Override 
    public void onPause(){ 
     super.onPause(); 
     player.pause(); 
    } 

    //************************************************************************************** 
    //*******************************STARTVIDEO********************************************* 
    @SuppressLint("NewApi") 
    private void startVideo() { 
     SurfaceHolder videoCaptureViewHolder = null; 
     try { 
      mCamera = Camera.open(1); 
     } catch (RuntimeException e) { 
      Log.e("CameraTest", "Camera Open filed"); 
      return; 
     } 
     mCamera.setErrorCallback(new ErrorCallback() { 
      public void onError(int error, Camera camera) { 
      } 
     }); 
     Camera.Parameters parameters = mCamera.getParameters(); 
     List<Camera.Size> previewSizes = parameters.getSupportedPreviewSizes(); 
     Camera.Size cs = previewSizes.get(0); 
     parameters.setPreviewSize(cs.width,cs.height); 


     setDisplayOrientation(mCamera, 90); 
     mCamera.setParameters(parameters); 
     if (null != mVideoCaptureView) 
      videoCaptureViewHolder = mVideoCaptureView.getHolder(); 
     try { 
      mCamera.setPreviewDisplay(videoCaptureViewHolder); 
     } catch (Throwable t) { 
     } 
     Log.v("CameraTest","Camera PreviewFrameRate = "+mCamera.getParameters().getPreviewFrameRate()); 
     Size previewSize=mCamera.getParameters().getPreviewSize(); 
     int dataBufferSize=(int)(previewSize.height*previewSize.width* (ImageFormat.getBitsPerPixel(mCamera.getParameters().getPreviewFormat())/8.0)); 
     mCamera.addCallbackBuffer(new byte[dataBufferSize]); 
     mCamera.setPreviewCallbackWithBuffer(new Camera.PreviewCallback() { 
      private long timestamp=0; 
      public synchronized void onPreviewFrame(byte[] data, Camera camera) { 
       Size previewSize=camera.getParameters().getPreviewSize(); 
       YuvImage yuvImage= new YuvImage(data,ImageFormat.NV21, previewSize.width, previewSize.height, null); 
       ByteArrayOutputStream baos = new ByteArrayOutputStream(); 
       yuvImage.compressToJpeg(new Rect(0, 0, previewSize.width, previewSize.height),80, baos); 
       byte jpgData[]=baos.toByteArray(); 
       bmp = BitmapFactory.decodeByteArray(jpgData, 0, jpgData.length); 
       Bitmap bmp2 = Bitmap.createBitmap(bmp.getWidth(), bmp.getHeight(), Bitmap.Config.RGB_565); 
       Canvas canvas=new Canvas(bmp2); 
       Paint paint=new Paint(); 
       paint.setColor(Color.RED); 
       paint.setStyle(Paint.Style.STROKE); 
       paint.setStrokeWidth(2); 
       Matrix matrix = new Matrix(); 
       matrix.setRotate(270,bmp.getWidth()/2,bmp.getHeight()/2); 
       canvas.drawBitmap(bmp, matrix, paint); 
       faces = new FaceDetector.Face[MAX_FACES]; 
       int mFaceWidth = bmp2.getWidth(); 
       int mFaceHeight = bmp2.getHeight(); 
       PointF midPoint = new PointF();  // mid-point between the eyes of the detected face 
       FaceDetector detector = new FaceDetector(mFaceWidth, mFaceHeight,MAX_FACES); 
       int facesFound = detector.findFaces(bmp2, faces); //bmp 

       if(facesFound > 0 && message_started==false){ 
        faces[0].getMidPoint(midPoint); 
        float eyeDistance = faces[0].eyesDistance(); 
        float confidence = faces[0].confidence(); 
        player.start(); //play of "You look good today!" 
        message_started = true; 
       } 
       miaImmagine2.setImageBitmap(bmp2); 
       //+++++++++++++++++++++FINAL PART++++++++++++++++++++++++++++++++++++++++ 
       try{ 
        camera.addCallbackBuffer(data); 
       }catch (Exception e) { 
        Log.e("CameraTest", "addCallbackBuffer error"); 
        return; 
       } 
       return; 
      } 
     }); 
     try { 
      mCamera.startPreview(); 
     } catch (Throwable e) { 
      mCamera.release(); 
      mCamera = null; 
      return; 
     } 

    } 
    //*************************************STARTVIDEO_END*************************************** 
    //////////////////////////////////////////////////////////////////////////////////////////// 
    //*************************************STOPVIDEO******************************************** 


    private void stopVideo() { 
     if(null==mCamera) 
      return; 
     try { 
      mCamera.stopPreview(); 
      mCamera.setPreviewDisplay(null); 
      mCamera.setPreviewCallbackWithBuffer(null); 
      mCamera.release(); 
     } catch (IOException e) { 
      e.printStackTrace(); 
      return; 
     } 
     mCamera = null; 
    } 
    //**********************************STOPVIDEO_END******************************************** 
    ////////////////////////////////////////////////////////////////////////////////////////////// 
    //**********************************FINISH*************************************************** 
    public void finish(){ 
     stopVideo(); 
     super.finish(); 
    }; 
    //**********************************FINISH_END*********************************************** 


    } 
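
If you would rather record the message from inside the app instead of bundling a file in res/raw, MediaRecorder can do it. This is only a rough sketch under a few assumptions: the output path and file name are just examples, it uses android.media.MediaRecorder and java.io.IOException, and the app needs the android.permission.RECORD_AUDIO permission in its manifest:

// record a short clip from the microphone (requires android.permission.RECORD_AUDIO) 
String path = getFilesDir() + "/my_message.3gp";      // example output file 
MediaRecorder recorder = new MediaRecorder(); 
recorder.setAudioSource(MediaRecorder.AudioSource.MIC); 
recorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP); 
recorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB); 
recorder.setOutputFile(path); 
try { 
    recorder.prepare(); 
    recorder.start(); 
    // ... speak your message, then (e.g. on a button press): 
    recorder.stop(); 
} catch (IOException e) { 
    e.printStackTrace(); 
} finally { 
    recorder.release(); 
} 

// later, play the recording back instead of R.raw.you_look_good 
MediaPlayer myVoice = new MediaPlayer(); 
try { 
    myVoice.setDataSource(path); 
    myVoice.prepare(); 
    myVoice.start(); 
} catch (IOException e) { 
    e.printStackTrace(); 
} 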

Could I use something like this? http://android-er.blogspot.co.uk/2012/04/face-detection-for-camera.html


Yes, that is exactly the face detector I used, and it works perfectly. Here is the documentation: https://developer.android.com/reference/android/media/FaceDetector.Face.html If you test a solution and it detects your face, let me know


Awesome! I'll try that, but I already have one that I want to show you to see what you think. Is there any way I can contact you, like Skype? That would be easier